35 #include "llvm/ADT/IndexedMap.h"
36 #include "llvm/ADT/PointerEmbeddedInt.h"
37 #include "llvm/ADT/STLExtras.h"
38 #include "llvm/ADT/SmallSet.h"
39 #include "llvm/ADT/StringExtras.h"
40 #include "llvm/Frontend/OpenMP/OMPAssume.h"
41 #include "llvm/Frontend/OpenMP/OMPConstants.h"
44 using namespace clang;
45 using namespace llvm::omp;
58 enum DefaultDataSharingAttributes {
62 DSA_firstprivate = 1 << 2,
72 unsigned Modifier = 0;
73 const Expr *RefExpr =
nullptr;
76 bool AppliedToPointee =
false;
77 DSAVarData() =
default;
81 bool AppliedToPointee)
82 : DKind(DKind), CKind(CKind), Modifier(Modifier), RefExpr(RefExpr),
83 PrivateCopy(PrivateCopy), ImplicitDSALoc(ImplicitDSALoc),
84 AppliedToPointee(AppliedToPointee) {}
86 using OperatorOffsetTy =
88 using DoacrossDependMapTy =
89 llvm::DenseMap<OMPDependClause *, OperatorOffsetTy>;
91 enum class UsesAllocatorsDeclKind {
103 unsigned Modifier = 0;
106 llvm::PointerIntPair<const Expr *, 1, bool> RefExpr;
110 bool AppliedToPointee =
false;
112 using DeclSAMapTy = llvm::SmallDenseMap<const ValueDecl *, DSAInfo, 8>;
113 using UsedRefMapTy = llvm::SmallDenseMap<const ValueDecl *, const Expr *, 8>;
114 using LCDeclInfo = std::pair<unsigned, VarDecl *>;
115 using LoopControlVariablesMapTy =
116 llvm::SmallDenseMap<const ValueDecl *, LCDeclInfo, 8>;
119 struct MappedExprComponentTy {
123 using MappedExprComponentsTy =
124 llvm::DenseMap<const ValueDecl *, MappedExprComponentTy>;
125 using CriticalsWithHintsTy =
126 llvm::StringMap<std::pair<const OMPCriticalDirective *, llvm::APSInt>>;
127 struct ReductionData {
128 using BOKPtrType = llvm::PointerEmbeddedInt<BinaryOperatorKind, 16>;
130 llvm::PointerUnion<const Expr *, BOKPtrType> ReductionOp;
131 ReductionData() =
default;
138 ReductionOp = RefExpr;
141 using DeclReductionMapTy =
142 llvm::SmallDenseMap<const ValueDecl *, ReductionData, 4>;
143 struct DefaultmapInfo {
147 DefaultmapInfo() =
default;
149 : ImplicitBehavior(M), SLoc(Loc) {}
152 struct SharingMapTy {
153 DeclSAMapTy SharingMap;
154 DeclReductionMapTy ReductionMap;
155 UsedRefMapTy AlignedMap;
156 UsedRefMapTy NontemporalMap;
157 MappedExprComponentsTy MappedExprComponents;
158 LoopControlVariablesMapTy LCVMap;
159 DefaultDataSharingAttributes DefaultAttr = DSA_unspecified;
164 Scope *CurScope =
nullptr;
170 DoacrossDependMapTy DoacrossDepends;
175 unsigned AssociatedLoops = 1;
176 bool HasMutipleLoops =
false;
177 const Decl *PossiblyLoopCounter =
nullptr;
178 bool NowaitRegion =
false;
179 bool UntiedRegion =
false;
180 bool CancelRegion =
false;
181 bool LoopStart =
false;
182 bool BodyComplete =
false;
187 Expr *TaskgroupReductionRef =
nullptr;
196 llvm::DenseMap<CanonicalDeclPtr<const Decl>, UsesAllocatorsDeclKind>
198 Expr *DeclareMapperVar =
nullptr;
201 :
Directive(DKind), DirectiveName(Name), CurScope(CurScope),
203 SharingMapTy() =
default;
209 DeclSAMapTy Threadprivates;
216 bool ForceCapturing =
false;
219 bool ForceCaptureByReferenceInTargetExecutable =
false;
220 CriticalsWithHintsTy Criticals;
221 unsigned IgnoredStackElements = 0;
225 using const_iterator = StackTy::const_reverse_iterator;
226 const_iterator begin()
const {
227 return Stack.empty() ? const_iterator()
228 : Stack.back().first.rbegin() + IgnoredStackElements;
230 const_iterator end()
const {
231 return Stack.empty() ? const_iterator() : Stack.back().first.rend();
233 using iterator = StackTy::reverse_iterator;
235 return Stack.empty() ? iterator()
236 : Stack.back().first.rbegin() + IgnoredStackElements;
239 return Stack.empty() ? iterator() : Stack.back().first.rend();
244 bool isStackEmpty()
const {
245 return Stack.empty() ||
246 Stack.back().second != CurrentNonCapturingFunctionScope ||
247 Stack.back().first.size() <= IgnoredStackElements;
249 size_t getStackSize()
const {
250 return isStackEmpty() ? 0
251 : Stack.back().first.size() - IgnoredStackElements;
254 SharingMapTy *getTopOfStackOrNull() {
255 size_t Size = getStackSize();
258 return &Stack.back().first[
Size - 1];
260 const SharingMapTy *getTopOfStackOrNull()
const {
261 return const_cast<DSAStackTy &
>(*this).getTopOfStackOrNull();
263 SharingMapTy &getTopOfStack() {
264 assert(!isStackEmpty() &&
"no current directive");
265 return *getTopOfStackOrNull();
267 const SharingMapTy &getTopOfStack()
const {
268 return const_cast<DSAStackTy &
>(*this).getTopOfStack();
271 SharingMapTy *getSecondOnStackOrNull() {
272 size_t Size = getStackSize();
275 return &Stack.back().first[
Size - 2];
277 const SharingMapTy *getSecondOnStackOrNull()
const {
278 return const_cast<DSAStackTy &
>(*this).getSecondOnStackOrNull();
287 SharingMapTy &getStackElemAtLevel(
unsigned Level) {
288 assert(
Level < getStackSize() &&
"no such stack element");
289 return Stack.back().first[
Level];
291 const SharingMapTy &getStackElemAtLevel(
unsigned Level)
const {
292 return const_cast<DSAStackTy &
>(*this).getStackElemAtLevel(
Level);
295 DSAVarData getDSA(const_iterator &Iter,
ValueDecl *D)
const;
298 bool isOpenMPLocal(
VarDecl *D, const_iterator Iter)
const;
311 Expr *OMPPredefinedAllocators[OMPAllocateDeclAttr::OMPUserDefinedMemAlloc] = {
320 explicit DSAStackTy(
Sema &S) : SemaRef(S) {}
323 void setOMPAllocatorHandleT(
QualType Ty) { OMPAllocatorHandleT = Ty; }
325 QualType getOMPAllocatorHandleT()
const {
return OMPAllocatorHandleT; }
327 void setOMPAlloctraitT(
QualType Ty) { OMPAlloctraitT = Ty; }
329 QualType getOMPAlloctraitT()
const {
return OMPAlloctraitT; }
331 void setAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
333 OMPPredefinedAllocators[AllocatorKind] = Allocator;
336 Expr *getAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind)
const {
337 return OMPPredefinedAllocators[AllocatorKind];
340 void setOMPDependT(
QualType Ty) { OMPDependT = Ty; }
342 QualType getOMPDependT()
const {
return OMPDependT; }
345 void setOMPEventHandleT(
QualType Ty) { OMPEventHandleT = Ty; }
347 QualType getOMPEventHandleT()
const {
return OMPEventHandleT; }
349 bool isClauseParsingMode()
const {
return ClauseKindMode != OMPC_unknown; }
351 assert(isClauseParsingMode() &&
"Must be in clause parsing mode.");
352 return ClauseKindMode;
356 bool isBodyComplete()
const {
357 const SharingMapTy *Top = getTopOfStackOrNull();
358 return Top && Top->BodyComplete;
360 void setBodyComplete() { getTopOfStack().BodyComplete =
true; }
362 bool isForceVarCapturing()
const {
return ForceCapturing; }
363 void setForceVarCapturing(
bool V) { ForceCapturing =
V; }
365 void setForceCaptureByReferenceInTargetExecutable(
bool V) {
366 ForceCaptureByReferenceInTargetExecutable =
V;
368 bool isForceCaptureByReferenceInTargetExecutable()
const {
369 return ForceCaptureByReferenceInTargetExecutable;
374 assert(!IgnoredStackElements &&
375 "cannot change stack while ignoring elements");
377 Stack.back().second != CurrentNonCapturingFunctionScope)
378 Stack.emplace_back(StackTy(), CurrentNonCapturingFunctionScope);
379 Stack.back().first.emplace_back(DKind, DirName, CurScope, Loc);
380 Stack.back().first.back().DefaultAttrLoc = Loc;
384 assert(!IgnoredStackElements &&
385 "cannot change stack while ignoring elements");
386 assert(!Stack.back().first.empty() &&
387 "Data-sharing attributes stack is empty!");
388 Stack.back().first.pop_back();
393 class ParentDirectiveScope {
398 ParentDirectiveScope(DSAStackTy &Self,
bool Activate)
399 : Self(Self), Active(
false) {
403 ~ParentDirectiveScope() { disable(); }
406 --Self.IgnoredStackElements;
412 ++Self.IgnoredStackElements;
421 "Expected loop-based directive.");
422 getTopOfStack().LoopStart =
true;
427 "Expected loop-based directive.");
428 getTopOfStack().LoopStart =
false;
431 bool isLoopStarted()
const {
433 "Expected loop-based directive.");
434 return !getTopOfStack().LoopStart;
437 void resetPossibleLoopCounter(
const Decl *D =
nullptr) {
438 getTopOfStack().PossiblyLoopCounter = D ? D->getCanonicalDecl() : D;
441 const Decl *getPossiblyLoopCunter()
const {
442 return getTopOfStack().PossiblyLoopCounter;
445 void pushFunction() {
446 assert(!IgnoredStackElements &&
447 "cannot change stack while ignoring elements");
449 assert(!isa<CapturingScopeInfo>(CurFnScope));
450 CurrentNonCapturingFunctionScope = CurFnScope;
454 assert(!IgnoredStackElements &&
455 "cannot change stack while ignoring elements");
456 if (!Stack.empty() && Stack.back().second == OldFSI) {
457 assert(Stack.back().first.empty());
460 CurrentNonCapturingFunctionScope =
nullptr;
462 if (!isa<CapturingScopeInfo>(FSI)) {
463 CurrentNonCapturingFunctionScope = FSI;
472 const std::pair<const OMPCriticalDirective *, llvm::APSInt>
474 auto I = Criticals.find(Name.getAsString());
475 if (I != Criticals.end())
494 const LCDeclInfo isLoopControlVariable(
const ValueDecl *D)
const;
499 const LCDeclInfo isParentLoopControlVariable(
const ValueDecl *D)
const;
504 const LCDeclInfo isLoopControlVariable(
const ValueDecl *D,
505 unsigned Level)
const;
508 const ValueDecl *getParentLoopControlVariable(
unsigned I)
const;
511 void markDeclAsUsedInScanDirective(
ValueDecl *D) {
512 if (SharingMapTy *Stack = getSecondOnStackOrNull())
513 Stack->UsedInScanDirective.insert(D);
517 bool isUsedInScanDirective(
ValueDecl *D)
const {
518 if (
const SharingMapTy *Stack = getTopOfStackOrNull())
519 return Stack->UsedInScanDirective.contains(D);
525 DeclRefExpr *PrivateCopy =
nullptr,
unsigned Modifier = 0,
526 bool AppliedToPointee =
false);
535 const Expr *ReductionRef);
541 Expr *&TaskgroupDescriptor)
const;
546 const Expr *&ReductionRef,
547 Expr *&TaskgroupDescriptor)
const;
550 Expr *getTaskgroupReductionRef()
const {
551 assert((getTopOfStack().
Directive == OMPD_taskgroup ||
555 "taskgroup reference expression requested for non taskgroup or "
556 "parallel/worksharing directive.");
557 return getTopOfStack().TaskgroupReductionRef;
561 bool isTaskgroupReductionRef(
const ValueDecl *VD,
unsigned Level)
const {
562 return getStackElemAtLevel(
Level).TaskgroupReductionRef &&
563 cast<DeclRefExpr>(getStackElemAtLevel(
Level).TaskgroupReductionRef)
569 const DSAVarData getTopDSA(
ValueDecl *D,
bool FromParent);
571 const DSAVarData getImplicitDSA(
ValueDecl *D,
bool FromParent)
const;
573 const DSAVarData getImplicitDSA(
ValueDecl *D,
unsigned Level)
const;
581 bool FromParent)
const;
589 bool FromParent)
const;
596 unsigned Level,
bool NotLastprivate =
false)
const;
600 bool hasExplicitDirective(
602 unsigned Level)
const;
606 const llvm::function_ref<
bool(
609 bool FromParent)
const;
613 const SharingMapTy *Top = getTopOfStackOrNull();
614 return Top ? Top->Directive : OMPD_unknown;
618 assert(!isStackEmpty() &&
"No directive at specified level.");
619 return getStackElemAtLevel(
Level).Directive;
623 unsigned OpenMPCaptureLevel)
const {
626 return CaptureRegions[OpenMPCaptureLevel];
630 const SharingMapTy *
Parent = getSecondOnStackOrNull();
635 void addRequiresDecl(
OMPRequiresDecl *RD) { RequiresDecls.push_back(RD); }
638 template <
typename ClauseType>
bool hasRequiresDeclWithClause()
const {
641 return isa<ClauseType>(C);
649 bool IsDuplicate =
false;
652 for (
const OMPClause *CPrev : D->clauselists()) {
653 if (CNew->getClauseKind() == CPrev->getClauseKind()) {
654 SemaRef.
Diag(CNew->getBeginLoc(),
655 diag::err_omp_requires_clause_redeclaration)
656 << getOpenMPClauseName(CNew->getClauseKind());
657 SemaRef.
Diag(CPrev->getBeginLoc(),
658 diag::note_omp_requires_previous_clause)
659 << getOpenMPClauseName(CPrev->getClauseKind());
670 TargetLocations.push_back(LocStart);
676 AtomicLocation = Loc;
681 SourceLocation getAtomicDirectiveLoc()
const {
return AtomicLocation; }
685 return TargetLocations;
690 getTopOfStack().DefaultAttr = DSA_none;
691 getTopOfStack().DefaultAttrLoc = Loc;
695 getTopOfStack().DefaultAttr = DSA_shared;
696 getTopOfStack().DefaultAttrLoc = Loc;
700 getTopOfStack().DefaultAttr = DSA_firstprivate;
701 getTopOfStack().DefaultAttrLoc = Loc;
706 DefaultmapInfo &DMI = getTopOfStack().DefaultmapMap[
Kind];
707 DMI.ImplicitBehavior = M;
713 return getTopOfStack()
714 .DefaultmapMap[OMPC_DEFAULTMAP_aggregate]
717 .DefaultmapMap[OMPC_DEFAULTMAP_scalar]
720 .DefaultmapMap[OMPC_DEFAULTMAP_pointer]
722 return getTopOfStack().DefaultmapMap[VariableCategory].ImplicitBehavior !=
727 return ConstructTraits;
732 ConstructTraits.append(Traits.begin(), Traits.end());
734 for (llvm::omp::TraitProperty Trait : llvm::reverse(Traits)) {
735 llvm::omp::TraitProperty Top = ConstructTraits.pop_back_val();
736 assert(Top == Trait &&
"Something left a trait on the stack!");
742 DefaultDataSharingAttributes getDefaultDSA(
unsigned Level)
const {
743 return getStackSize() <=
Level ? DSA_unspecified
744 : getStackElemAtLevel(
Level).DefaultAttr;
746 DefaultDataSharingAttributes getDefaultDSA()
const {
747 return isStackEmpty() ? DSA_unspecified : getTopOfStack().DefaultAttr;
750 return isStackEmpty() ?
SourceLocation() : getTopOfStack().DefaultAttrLoc;
754 return isStackEmpty()
756 : getTopOfStack().DefaultmapMap[
Kind].ImplicitBehavior;
759 getDefaultmapModifierAtLevel(
unsigned Level,
761 return getStackElemAtLevel(
Level).DefaultmapMap[
Kind].ImplicitBehavior;
763 bool isDefaultmapCapturedByRef(
unsigned Level,
766 getDefaultmapModifierAtLevel(
Level,
Kind);
767 if (
Kind == OMPC_DEFAULTMAP_scalar ||
Kind == OMPC_DEFAULTMAP_pointer) {
768 return (M == OMPC_DEFAULTMAP_MODIFIER_alloc) ||
769 (M == OMPC_DEFAULTMAP_MODIFIER_to) ||
770 (M == OMPC_DEFAULTMAP_MODIFIER_from) ||
771 (M == OMPC_DEFAULTMAP_MODIFIER_tofrom);
778 case OMPC_DEFAULTMAP_scalar:
779 case OMPC_DEFAULTMAP_pointer:
781 (M == OMPC_DEFAULTMAP_MODIFIER_firstprivate) ||
782 (M == OMPC_DEFAULTMAP_MODIFIER_default);
783 case OMPC_DEFAULTMAP_aggregate:
784 return M == OMPC_DEFAULTMAP_MODIFIER_firstprivate;
788 llvm_unreachable(
"Unexpected OpenMPDefaultmapClauseKind enum");
790 bool mustBeFirstprivateAtLevel(
unsigned Level,
793 getDefaultmapModifierAtLevel(
Level,
Kind);
794 return mustBeFirstprivateBase(M,
Kind);
798 return mustBeFirstprivateBase(M,
Kind);
802 bool isThreadPrivate(
VarDecl *D) {
803 const DSAVarData DVar = getTopDSA(D,
false);
808 void setOrderedRegion(
bool IsOrdered,
const Expr *Param,
811 getTopOfStack().OrderedRegion.emplace(Param, Clause);
813 getTopOfStack().OrderedRegion.reset();
817 bool isOrderedRegion()
const {
818 if (
const SharingMapTy *Top = getTopOfStackOrNull())
819 return Top->OrderedRegion.hasValue();
823 std::pair<const Expr *, OMPOrderedClause *> getOrderedRegionParam()
const {
824 if (
const SharingMapTy *Top = getTopOfStackOrNull())
825 if (Top->OrderedRegion.hasValue())
826 return Top->OrderedRegion.getValue();
827 return std::make_pair(
nullptr,
nullptr);
831 bool isParentOrderedRegion()
const {
832 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
833 return Parent->OrderedRegion.hasValue();
837 std::pair<const Expr *, OMPOrderedClause *>
838 getParentOrderedRegionParam()
const {
839 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
840 if (
Parent->OrderedRegion.hasValue())
841 return Parent->OrderedRegion.getValue();
842 return std::make_pair(
nullptr,
nullptr);
845 void setNowaitRegion(
bool IsNowait =
true) {
846 getTopOfStack().NowaitRegion = IsNowait;
850 bool isParentNowaitRegion()
const {
851 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
852 return Parent->NowaitRegion;
856 void setUntiedRegion(
bool IsUntied =
true) {
857 getTopOfStack().UntiedRegion = IsUntied;
860 bool isUntiedRegion()
const {
861 const SharingMapTy *Top = getTopOfStackOrNull();
862 return Top ? Top->UntiedRegion :
false;
865 void setParentCancelRegion(
bool Cancel =
true) {
866 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
867 Parent->CancelRegion |= Cancel;
870 bool isCancelRegion()
const {
871 const SharingMapTy *Top = getTopOfStackOrNull();
872 return Top ? Top->CancelRegion :
false;
877 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
878 Parent->PrevScanLocation = Loc;
881 bool doesParentHasScanDirective()
const {
882 const SharingMapTy *Top = getSecondOnStackOrNull();
883 return Top ? Top->PrevScanLocation.isValid() :
false;
887 const SharingMapTy *Top = getSecondOnStackOrNull();
892 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
893 Parent->PrevOrderedLocation = Loc;
896 bool doesParentHasOrderedDirective()
const {
897 const SharingMapTy *Top = getSecondOnStackOrNull();
898 return Top ? Top->PrevOrderedLocation.isValid() :
false;
902 const SharingMapTy *Top = getSecondOnStackOrNull();
907 void setAssociatedLoops(
unsigned Val) {
908 getTopOfStack().AssociatedLoops = Val;
910 getTopOfStack().HasMutipleLoops =
true;
913 unsigned getAssociatedLoops()
const {
914 const SharingMapTy *Top = getTopOfStackOrNull();
915 return Top ? Top->AssociatedLoops : 0;
918 bool hasMutipleLoops()
const {
919 const SharingMapTy *Top = getTopOfStackOrNull();
920 return Top ? Top->HasMutipleLoops :
false;
926 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
927 Parent->InnerTeamsRegionLoc = TeamsRegionLoc;
930 bool hasInnerTeamsRegion()
const {
931 return getInnerTeamsRegionLoc().
isValid();
935 const SharingMapTy *Top = getTopOfStackOrNull();
939 Scope *getCurScope()
const {
940 const SharingMapTy *Top = getTopOfStackOrNull();
941 return Top ? Top->CurScope :
nullptr;
943 void setContext(
DeclContext *DC) { getTopOfStack().Context = DC; }
945 const SharingMapTy *Top = getTopOfStackOrNull();
951 bool checkMappableExprComponentListsForDecl(
952 const ValueDecl *VD,
bool CurrentRegionOnly,
953 const llvm::function_ref<
965 if (CurrentRegionOnly)
970 for (; SI != SE; ++SI) {
971 auto MI = SI->MappedExprComponents.find(VD);
972 if (MI != SI->MappedExprComponents.end())
974 MI->second.Components)
975 if (Check(L, MI->second.Kind))
983 bool checkMappableExprComponentListsForDeclAtLevel(
985 const llvm::function_ref<
989 if (getStackSize() <=
Level)
992 const SharingMapTy &StackElem = getStackElemAtLevel(
Level);
993 auto MI = StackElem.MappedExprComponents.find(VD);
994 if (MI != StackElem.MappedExprComponents.end())
996 MI->second.Components)
997 if (Check(L, MI->second.Kind))
1004 void addMappableExpressionComponents(
1008 MappedExprComponentTy &MEC = getTopOfStack().MappedExprComponents[VD];
1010 MEC.Components.resize(MEC.Components.size() + 1);
1011 MEC.Components.back().append(Components.begin(), Components.end());
1012 MEC.Kind = WhereFoundClauseKind;
1015 unsigned getNestingLevel()
const {
1016 assert(!isStackEmpty());
1017 return getStackSize() - 1;
1020 const OperatorOffsetTy &OpsOffs) {
1021 SharingMapTy *
Parent = getSecondOnStackOrNull();
1023 Parent->DoacrossDepends.try_emplace(C, OpsOffs);
1025 llvm::iterator_range<DoacrossDependMapTy::const_iterator>
1026 getDoacrossDependClauses()
const {
1027 const SharingMapTy &StackElem = getTopOfStack();
1029 const DoacrossDependMapTy &Ref = StackElem.DoacrossDepends;
1030 return llvm::make_range(Ref.begin(), Ref.end());
1032 return llvm::make_range(StackElem.DoacrossDepends.end(),
1033 StackElem.DoacrossDepends.end());
1037 void addMappedClassesQualTypes(
QualType QT) {
1038 SharingMapTy &StackElem = getTopOfStack();
1039 StackElem.MappedClassesQualTypes.insert(QT);
1043 bool isClassPreviouslyMapped(
QualType QT)
const {
1044 const SharingMapTy &StackElem = getTopOfStack();
1045 return StackElem.MappedClassesQualTypes.contains(QT);
1049 void addToParentTargetRegionLinkGlobals(
DeclRefExpr *E) {
1050 assert(*OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(
1051 E->
getDecl()) == OMPDeclareTargetDeclAttr::MT_Link &&
1052 "Expected declare target link global.");
1053 for (
auto &Elem : *
this) {
1055 Elem.DeclareTargetLinkVarDecls.push_back(E);
1065 "Expected target executable directive.");
1066 return getTopOfStack().DeclareTargetLinkVarDecls;
1070 void addInnerAllocatorExpr(
Expr *E) {
1071 getTopOfStack().InnerUsedAllocators.push_back(E);
1075 return getTopOfStack().InnerUsedAllocators;
1079 void addImplicitTaskFirstprivate(
unsigned Level,
Decl *D) {
1080 getStackElemAtLevel(
Level).ImplicitTaskFirstprivates.insert(D);
1083 bool isImplicitTaskFirstprivate(
Decl *D)
const {
1084 return getTopOfStack().ImplicitTaskFirstprivates.contains(D);
1088 void addUsesAllocatorsDecl(
const Decl *D, UsesAllocatorsDeclKind
Kind) {
1089 getTopOfStack().UsesAllocatorsDecls.try_emplace(D,
Kind);
1094 const Decl *D)
const {
1095 const SharingMapTy &StackElem = getTopOfStack();
1096 auto I = StackElem.UsesAllocatorsDecls.find(D);
1097 if (I == StackElem.UsesAllocatorsDecls.end())
1099 return I->getSecond();
1102 const SharingMapTy &StackElem = getTopOfStack();
1103 auto I = StackElem.UsesAllocatorsDecls.find(D);
1104 if (I == StackElem.UsesAllocatorsDecls.end())
1106 return I->getSecond();
1109 void addDeclareMapperVarRef(
Expr *Ref) {
1110 SharingMapTy &StackElem = getTopOfStack();
1111 StackElem.DeclareMapperVar = Ref;
1113 const Expr *getDeclareMapperVarRef()
const {
1114 const SharingMapTy *Top = getTopOfStackOrNull();
1115 return Top ? Top->DeclareMapperVar :
nullptr;
1125 DKind == OMPD_unknown;
1131 if (
const auto *FE = dyn_cast<FullExpr>(E))
1132 E = FE->getSubExpr();
1134 if (
const auto *MTE = dyn_cast<MaterializeTemporaryExpr>(E))
1135 E = MTE->getSubExpr();
1137 while (
const auto *Binder = dyn_cast<CXXBindTemporaryExpr>(E))
1138 E = Binder->getSubExpr();
1140 if (
const auto *ICE = dyn_cast<ImplicitCastExpr>(E))
1141 E = ICE->getSubExprAsWritten();
1150 if (
const auto *CED = dyn_cast<OMPCapturedExprDecl>(D))
1151 if (
const auto *ME = dyn_cast<MemberExpr>(
getExprAsWritten(CED->getInit())))
1152 D = ME->getMemberDecl();
1153 const auto *VD = dyn_cast<VarDecl>(D);
1154 const auto *FD = dyn_cast<FieldDecl>(D);
1155 if (VD !=
nullptr) {
1160 FD = FD->getCanonicalDecl();
1171 DSAStackTy::DSAVarData DSAStackTy::getDSA(const_iterator &Iter,
1174 auto *VD = dyn_cast<VarDecl>(D);
1175 const auto *FD = dyn_cast<FieldDecl>(D);
1177 if (Iter == end()) {
1183 if (VD && !VD->isFunctionOrMethodVarDecl() && !isa<ParmVarDecl>(VD))
1184 DVar.CKind = OMPC_shared;
1190 if (VD && VD->hasGlobalStorage())
1191 DVar.CKind = OMPC_shared;
1195 DVar.CKind = OMPC_shared;
1204 if (VD && isOpenMPLocal(VD, Iter) && VD->isLocalVarDecl() &&
1205 (VD->getStorageClass() ==
SC_Auto || VD->getStorageClass() ==
SC_None)) {
1206 DVar.CKind = OMPC_private;
1210 DVar.DKind = Iter->Directive;
1213 if (Iter->SharingMap.count(D)) {
1214 const DSAInfo &Data = Iter->SharingMap.lookup(D);
1215 DVar.RefExpr = Data.RefExpr.getPointer();
1216 DVar.PrivateCopy = Data.PrivateCopy;
1217 DVar.CKind = Data.Attributes;
1218 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1219 DVar.Modifier = Data.Modifier;
1220 DVar.AppliedToPointee = Data.AppliedToPointee;
1228 switch (Iter->DefaultAttr) {
1230 DVar.CKind = OMPC_shared;
1231 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1235 case DSA_firstprivate:
1236 if (VD->getStorageDuration() ==
SD_Static &&
1238 DVar.CKind = OMPC_unknown;
1240 DVar.CKind = OMPC_firstprivate;
1242 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1244 case DSA_unspecified:
1249 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1253 DVar.CKind = OMPC_shared;
1263 DSAVarData DVarTemp;
1264 const_iterator I = Iter, E = end();
1272 DVarTemp = getDSA(I, D);
1273 if (DVarTemp.CKind != OMPC_shared) {
1274 DVar.RefExpr =
nullptr;
1275 DVar.CKind = OMPC_firstprivate;
1278 }
while (I != E && !isImplicitTaskingRegion(I->Directive));
1280 (DVarTemp.CKind == OMPC_unknown) ? OMPC_firstprivate : OMPC_shared;
1289 return getDSA(++Iter, D);
1292 const Expr *DSAStackTy::addUniqueAligned(
const ValueDecl *D,
1293 const Expr *NewDE) {
1294 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1296 SharingMapTy &StackElem = getTopOfStack();
1297 auto It = StackElem.AlignedMap.find(D);
1298 if (It == StackElem.AlignedMap.end()) {
1299 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1300 StackElem.AlignedMap[D] = NewDE;
1303 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1307 const Expr *DSAStackTy::addUniqueNontemporal(
const ValueDecl *D,
1308 const Expr *NewDE) {
1309 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1311 SharingMapTy &StackElem = getTopOfStack();
1312 auto It = StackElem.NontemporalMap.find(D);
1313 if (It == StackElem.NontemporalMap.end()) {
1314 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1315 StackElem.NontemporalMap[D] = NewDE;
1318 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1323 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1325 SharingMapTy &StackElem = getTopOfStack();
1326 StackElem.LCVMap.try_emplace(
1327 D, LCDeclInfo(StackElem.LCVMap.size() + 1,
Capture));
1330 const DSAStackTy::LCDeclInfo
1331 DSAStackTy::isLoopControlVariable(
const ValueDecl *D)
const {
1332 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1334 const SharingMapTy &StackElem = getTopOfStack();
1335 auto It = StackElem.LCVMap.find(D);
1336 if (It != StackElem.LCVMap.end())
1338 return {0,
nullptr};
1341 const DSAStackTy::LCDeclInfo
1342 DSAStackTy::isLoopControlVariable(
const ValueDecl *D,
unsigned Level)
const {
1343 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1345 for (
unsigned I =
Level + 1; I > 0; --I) {
1346 const SharingMapTy &StackElem = getStackElemAtLevel(I - 1);
1347 auto It = StackElem.LCVMap.find(D);
1348 if (It != StackElem.LCVMap.end())
1351 return {0,
nullptr};
1354 const DSAStackTy::LCDeclInfo
1355 DSAStackTy::isParentLoopControlVariable(
const ValueDecl *D)
const {
1356 const SharingMapTy *
Parent = getSecondOnStackOrNull();
1357 assert(
Parent &&
"Data-sharing attributes stack is empty");
1359 auto It =
Parent->LCVMap.find(D);
1360 if (It !=
Parent->LCVMap.end())
1362 return {0,
nullptr};
1365 const ValueDecl *DSAStackTy::getParentLoopControlVariable(
unsigned I)
const {
1366 const SharingMapTy *
Parent = getSecondOnStackOrNull();
1367 assert(
Parent &&
"Data-sharing attributes stack is empty");
1368 if (
Parent->LCVMap.size() < I)
1370 for (
const auto &Pair :
Parent->LCVMap)
1371 if (Pair.second.first == I)
1378 bool AppliedToPointee) {
1380 if (A == OMPC_threadprivate) {
1381 DSAInfo &Data = Threadprivates[D];
1382 Data.Attributes = A;
1383 Data.RefExpr.setPointer(E);
1384 Data.PrivateCopy =
nullptr;
1385 Data.Modifier = Modifier;
1387 DSAInfo &Data = getTopOfStack().SharingMap[D];
1388 assert(Data.Attributes == OMPC_unknown || (A == Data.Attributes) ||
1389 (A == OMPC_firstprivate && Data.Attributes == OMPC_lastprivate) ||
1390 (A == OMPC_lastprivate && Data.Attributes == OMPC_firstprivate) ||
1391 (isLoopControlVariable(D).first && A == OMPC_private));
1392 Data.Modifier = Modifier;
1393 if (A == OMPC_lastprivate && Data.Attributes == OMPC_firstprivate) {
1394 Data.RefExpr.setInt(
true);
1397 const bool IsLastprivate =
1398 A == OMPC_lastprivate || Data.Attributes == OMPC_lastprivate;
1399 Data.Attributes = A;
1400 Data.RefExpr.setPointerAndInt(E, IsLastprivate);
1401 Data.PrivateCopy = PrivateCopy;
1402 Data.AppliedToPointee = AppliedToPointee;
1404 DSAInfo &Data = getTopOfStack().SharingMap[PrivateCopy->
getDecl()];
1405 Data.Modifier = Modifier;
1406 Data.Attributes = A;
1407 Data.RefExpr.setPointerAndInt(PrivateCopy, IsLastprivate);
1408 Data.PrivateCopy =
nullptr;
1409 Data.AppliedToPointee = AppliedToPointee;
1416 StringRef Name,
const AttrVec *Attrs =
nullptr,
1431 OMPReferencedVarAttr::CreateImplicit(SemaRef.
Context, OrigRef));
1438 bool RefersToCapture =
false) {
1449 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1451 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1452 "Additional reduction info may be specified only for reduction items.");
1453 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1454 assert(ReductionData.ReductionRange.isInvalid() &&
1455 (getTopOfStack().
Directive == OMPD_taskgroup ||
1459 "Additional reduction info may be specified only once for reduction "
1461 ReductionData.set(BOK, SR);
1462 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1463 if (!TaskgroupReductionRef) {
1466 TaskgroupReductionRef =
1472 const Expr *ReductionRef) {
1474 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1476 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1477 "Additional reduction info may be specified only for reduction items.");
1478 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1479 assert(ReductionData.ReductionRange.isInvalid() &&
1480 (getTopOfStack().
Directive == OMPD_taskgroup ||
1484 "Additional reduction info may be specified only once for reduction "
1486 ReductionData.set(ReductionRef, SR);
1487 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1488 if (!TaskgroupReductionRef) {
1491 TaskgroupReductionRef =
1496 const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1498 Expr *&TaskgroupDescriptor)
const {
1500 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1501 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1502 const DSAInfo &Data = I->SharingMap.lookup(D);
1503 if (Data.Attributes != OMPC_reduction ||
1504 Data.Modifier != OMPC_REDUCTION_task)
1506 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1507 if (!ReductionData.ReductionOp ||
1508 ReductionData.ReductionOp.is<
const Expr *>())
1509 return DSAVarData();
1510 SR = ReductionData.ReductionRange;
1511 BOK = ReductionData.ReductionOp.get<ReductionData::BOKPtrType>();
1512 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1513 "expression for the descriptor is not "
1515 TaskgroupDescriptor = I->TaskgroupReductionRef;
1516 return DSAVarData(I->Directive, OMPC_reduction, Data.RefExpr.getPointer(),
1517 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1520 return DSAVarData();
1523 const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1525 Expr *&TaskgroupDescriptor)
const {
1527 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1528 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1529 const DSAInfo &Data = I->SharingMap.lookup(D);
1530 if (Data.Attributes != OMPC_reduction ||
1531 Data.Modifier != OMPC_REDUCTION_task)
1533 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1534 if (!ReductionData.ReductionOp ||
1535 !ReductionData.ReductionOp.is<
const Expr *>())
1536 return DSAVarData();
1537 SR = ReductionData.ReductionRange;
1538 ReductionRef = ReductionData.ReductionOp.get<
const Expr *>();
1539 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1540 "expression for the descriptor is not "
1542 TaskgroupDescriptor = I->TaskgroupReductionRef;
1543 return DSAVarData(I->Directive, OMPC_reduction, Data.RefExpr.getPointer(),
1544 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1547 return DSAVarData();
1550 bool DSAStackTy::isOpenMPLocal(
VarDecl *D, const_iterator I)
const {
1552 for (const_iterator E = end(); I != E; ++I) {
1553 if (isImplicitOrExplicitTaskingRegion(I->Directive) ||
1557 Scope *CurScope = getCurScope();
1558 while (CurScope && CurScope != TopScope && !CurScope->
isDeclScope(D))
1560 return CurScope != TopScope;
1563 if (I->Context == DC)
1572 bool AcceptIfMutable =
true,
1573 bool *IsClassType =
nullptr) {
1575 Type =
Type.getNonReferenceType().getCanonicalType();
1576 bool IsConstant =
Type.isConstant(Context);
1581 if (
const auto *CTSD = dyn_cast_or_null<ClassTemplateSpecializationDecl>(RD))
1583 RD = CTD->getTemplatedDecl();
1586 return IsConstant && !(SemaRef.
getLangOpts().CPlusPlus && RD &&
1593 bool AcceptIfMutable =
true,
1594 bool ListItemNotVar =
false) {
1598 unsigned Diag = ListItemNotVar ? diag::err_omp_const_list_item
1599 : IsClassType ? diag::err_omp_const_not_mutable_variable
1600 : diag::err_omp_const_variable;
1601 SemaRef.
Diag(ELoc,
Diag) << getOpenMPClauseName(CKind);
1602 if (!ListItemNotVar && D) {
1603 const VarDecl *VD = dyn_cast<VarDecl>(D);
1607 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
1615 const DSAStackTy::DSAVarData DSAStackTy::getTopDSA(
ValueDecl *D,
1620 auto *VD = dyn_cast<VarDecl>(D);
1621 auto TI = Threadprivates.find(D);
1622 if (TI != Threadprivates.end()) {
1623 DVar.RefExpr = TI->getSecond().RefExpr.getPointer();
1624 DVar.CKind = OMPC_threadprivate;
1625 DVar.Modifier = TI->getSecond().Modifier;
1628 if (VD && VD->
hasAttr<OMPThreadPrivateDeclAttr>()) {
1631 VD->
getAttr<OMPThreadPrivateDeclAttr>()->getLocation());
1632 DVar.CKind = OMPC_threadprivate;
1633 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1640 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
1644 VD->
hasAttr<AsmLabelAttr>() && !VD->isLocalVarDecl())) {
1647 DVar.CKind = OMPC_threadprivate;
1648 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1652 VD->isLocalVarDeclOrParm() && !isStackEmpty() &&
1653 !isLoopControlVariable(D).first) {
1654 const_iterator IterTarget =
1655 std::find_if(begin(), end(), [](
const SharingMapTy &Data) {
1658 if (IterTarget != end()) {
1659 const_iterator ParentIterTarget = IterTarget + 1;
1660 for (const_iterator Iter = begin(); Iter != ParentIterTarget; ++Iter) {
1661 if (isOpenMPLocal(VD, Iter)) {
1665 DVar.CKind = OMPC_threadprivate;
1669 if (!isClauseParsingMode() || IterTarget != begin()) {
1670 auto DSAIter = IterTarget->SharingMap.find(D);
1671 if (DSAIter != IterTarget->SharingMap.end() &&
1673 DVar.RefExpr = DSAIter->getSecond().RefExpr.getPointer();
1674 DVar.CKind = OMPC_threadprivate;
1677 const_iterator
End = end();
1683 IterTarget->ConstructLoc);
1684 DVar.CKind = OMPC_threadprivate;
1702 if (VD && VD->isStaticDataMember()) {
1704 const_iterator I = begin();
1705 const_iterator EndI = end();
1706 if (FromParent && I != EndI)
1709 auto It = I->SharingMap.find(D);
1710 if (It != I->SharingMap.end()) {
1711 const DSAInfo &Data = It->getSecond();
1712 DVar.RefExpr = Data.RefExpr.getPointer();
1713 DVar.PrivateCopy = Data.PrivateCopy;
1714 DVar.CKind = Data.Attributes;
1715 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1716 DVar.DKind = I->Directive;
1717 DVar.Modifier = Data.Modifier;
1718 DVar.AppliedToPointee = Data.AppliedToPointee;
1723 DVar.CKind = OMPC_shared;
1730 if (SemaRef.
LangOpts.OpenMP <= 31) {
1738 DSAVarData DVarTemp = hasInnermostDSA(
1741 return C == OMPC_firstprivate ||
C == OMPC_shared;
1743 MatchesAlways, FromParent);
1744 if (DVarTemp.CKind != OMPC_unknown && DVarTemp.RefExpr)
1747 DVar.CKind = OMPC_shared;
1754 const_iterator I = begin();
1755 const_iterator EndI = end();
1756 if (FromParent && I != EndI)
1760 auto It = I->SharingMap.find(D);
1761 if (It != I->SharingMap.end()) {
1762 const DSAInfo &Data = It->getSecond();
1763 DVar.RefExpr = Data.RefExpr.getPointer();
1764 DVar.PrivateCopy = Data.PrivateCopy;
1765 DVar.CKind = Data.Attributes;
1766 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1767 DVar.DKind = I->Directive;
1768 DVar.Modifier = Data.Modifier;
1769 DVar.AppliedToPointee = Data.AppliedToPointee;
1775 const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(
ValueDecl *D,
1776 bool FromParent)
const {
1777 if (isStackEmpty()) {
1779 return getDSA(I, D);
1782 const_iterator StartI = begin();
1783 const_iterator EndI = end();
1784 if (FromParent && StartI != EndI)
1786 return getDSA(StartI, D);
1789 const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(
ValueDecl *D,
1790 unsigned Level)
const {
1791 if (getStackSize() <=
Level)
1792 return DSAVarData();
1794 const_iterator StartI = std::next(begin(), getStackSize() - 1 -
Level);
1795 return getDSA(StartI, D);
1798 const DSAStackTy::DSAVarData
1802 bool FromParent)
const {
1806 const_iterator I = begin();
1807 const_iterator EndI = end();
1808 if (FromParent && I != EndI)
1810 for (; I != EndI; ++I) {
1811 if (!DPred(I->Directive) &&
1812 !isImplicitOrExplicitTaskingRegion(I->Directive))
1814 const_iterator NewI = I;
1815 DSAVarData DVar = getDSA(NewI, D);
1816 if (I == NewI && CPred(DVar.CKind, DVar.AppliedToPointee))
1822 const DSAStackTy::DSAVarData DSAStackTy::hasInnermostDSA(
1825 bool FromParent)
const {
1829 const_iterator StartI = begin();
1830 const_iterator EndI = end();
1831 if (FromParent && StartI != EndI)
1833 if (StartI == EndI || !DPred(StartI->Directive))
1835 const_iterator NewI = StartI;
1836 DSAVarData DVar = getDSA(NewI, D);
1837 return (NewI == StartI && CPred(DVar.CKind, DVar.AppliedToPointee))
1842 bool DSAStackTy::hasExplicitDSA(
1845 unsigned Level,
bool NotLastprivate)
const {
1846 if (getStackSize() <=
Level)
1849 const SharingMapTy &StackElem = getStackElemAtLevel(
Level);
1850 auto I = StackElem.SharingMap.find(D);
1851 if (I != StackElem.SharingMap.end() && I->getSecond().RefExpr.getPointer() &&
1852 CPred(I->getSecond().Attributes, I->getSecond().AppliedToPointee) &&
1853 (!NotLastprivate || !I->getSecond().RefExpr.getInt()))
1856 auto LI = StackElem.LCVMap.find(D);
1857 if (LI != StackElem.LCVMap.end())
1858 return CPred(OMPC_private,
false);
1862 bool DSAStackTy::hasExplicitDirective(
1864 unsigned Level)
const {
1865 if (getStackSize() <=
Level)
1867 const SharingMapTy &StackElem = getStackElemAtLevel(
Level);
1868 return DPred(StackElem.Directive);
1871 bool DSAStackTy::hasDirective(
1875 bool FromParent)
const {
1877 size_t Skip = FromParent ? 2 : 1;
1878 for (const_iterator I = begin() +
std::min(Skip, getStackSize()), E = end();
1880 if (DPred(I->Directive, I->DirectiveName, I->ConstructLoc))
1886 void Sema::InitDataSharingAttributesStack() {
1887 VarDataSharingAttributesStack =
new DSAStackTy(*
this);
1890 #define DSAStack static_cast<DSAStackTy *>(VarDataSharingAttributesStack)
1892 void Sema::pushOpenMPFunctionRegion() {
DSAStack->pushFunction(); }
1900 "Expected OpenMP device compilation.");
1906 enum class FunctionEmissionStatus {
1916 assert(LangOpts.OpenMP && LangOpts.OpenMPIsDevice &&
1917 "Expected OpenMP device compilation.");
1923 case FunctionEmissionStatus::Emitted:
1924 Kind = SemaDiagnosticBuilder::K_Immediate;
1935 ? SemaDiagnosticBuilder::K_Deferred
1936 : SemaDiagnosticBuilder::K_Immediate;
1938 case FunctionEmissionStatus::TemplateDiscarded:
1939 case FunctionEmissionStatus::OMPDiscarded:
1940 Kind = SemaDiagnosticBuilder::K_Nop;
1942 case FunctionEmissionStatus::CUDADiscarded:
1943 llvm_unreachable(
"CUDADiscarded unexpected in OpenMP device compilation");
1949 DeviceDiagnosticReason::OmpDevice);
1955 assert(LangOpts.OpenMP && !LangOpts.OpenMPIsDevice &&
1956 "Expected OpenMP host compilation.");
1962 case FunctionEmissionStatus::Emitted:
1963 Kind = SemaDiagnosticBuilder::K_Immediate;
1966 Kind = SemaDiagnosticBuilder::K_Deferred;
1968 case FunctionEmissionStatus::TemplateDiscarded:
1969 case FunctionEmissionStatus::OMPDiscarded:
1970 case FunctionEmissionStatus::CUDADiscarded:
1971 Kind = SemaDiagnosticBuilder::K_Nop;
1977 DeviceDiagnosticReason::OmpHost);
1982 if (LO.OpenMP <= 45) {
1984 return OMPC_DEFAULTMAP_scalar;
1985 return OMPC_DEFAULTMAP_aggregate;
1988 return OMPC_DEFAULTMAP_pointer;
1990 return OMPC_DEFAULTMAP_scalar;
1991 return OMPC_DEFAULTMAP_aggregate;
1995 unsigned OpenMPCaptureLevel)
const {
1996 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
1999 bool IsByRef =
true;
2005 bool IsVariableUsedInMapClause =
false;
2067 bool IsVariableAssociatedWithSection =
false;
2069 DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2071 [&IsVariableUsedInMapClause, &IsVariableAssociatedWithSection,
2078 if (WhereFoundClauseKind != OMPC_map)
2081 auto EI = MapExprComponents.rbegin();
2082 auto EE = MapExprComponents.rend();
2084 assert(EI != EE &&
"Invalid map expression!");
2086 if (isa<DeclRefExpr>(EI->getAssociatedExpression()))
2087 IsVariableUsedInMapClause |= EI->getAssociatedDeclaration() == D;
2093 if (isa<ArraySubscriptExpr>(EI->getAssociatedExpression()) ||
2094 isa<OMPArraySectionExpr>(EI->getAssociatedExpression()) ||
2095 isa<MemberExpr>(EI->getAssociatedExpression()) ||
2096 isa<OMPArrayShapingExpr>(EI->getAssociatedExpression())) {
2097 IsVariableAssociatedWithSection =
true;
2106 if (IsVariableUsedInMapClause) {
2109 IsByRef = !(Ty->
isPointerType() && IsVariableAssociatedWithSection);
2114 IsByRef = (
DSAStack->isForceCaptureByReferenceInTargetExecutable() &&
2117 DSAStack->isDefaultmapCapturedByRef(
2122 return K == OMPC_reduction && !AppliedToPointee;
2130 ((IsVariableUsedInMapClause &&
2136 return K == OMPC_firstprivate ||
2137 (K == OMPC_reduction && AppliedToPointee);
2143 !(isa<OMPCapturedExprDecl>(D) && !D->
hasAttr<OMPCaptureNoInitAttr>() &&
2144 !cast<OMPCapturedExprDecl>(D)->getInit()->isGLValue()) &&
2147 !(
DSAStack->getDefaultDSA() == DSA_firstprivate &&
2168 unsigned Sema::getOpenMPNestingLevel()
const {
2169 assert(getLangOpts().OpenMP);
2170 return DSAStack->getNestingLevel();
2180 !
DSAStack->isClauseParsingMode()) ||
2191 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2194 auto *VD = dyn_cast<VarDecl>(D);
2196 if (VD && VD->isConstexpr())
2203 DSAStackTy::ParentDirectiveScope InParentDirectiveRAII(
2209 if (VD && !VD->hasLocalStorage() &&
2210 (getCurCapturedRegion() || getCurBlock() || getCurLambda())) {
2211 if (isInOpenMPTargetExecutionDirective()) {
2212 DSAStackTy::DSAVarData DVarTop =
2214 if (DVarTop.CKind != OMPC_unknown && DVarTop.RefExpr)
2219 if (OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2223 llvm::reverse(FunctionScopes),
2224 CheckScopeInfo ? (FunctionScopes.size() - (StopAt + 1)) : 0)) {
2225 if (!isa<CapturingScopeInfo>(FSI))
2227 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2233 assert(CSI &&
"Failed to find CapturedRegionScopeInfo");
2240 if (isInOpenMPDeclareTargetContext()) {
2243 if (LangOpts.OpenMP <= 45 &&
2244 !OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2245 checkDeclIsAllowedInOpenMPTarget(
nullptr, VD);
2250 if (CheckScopeInfo) {
2251 bool OpenMPFound =
false;
2252 for (
unsigned I = StopAt + 1; I > 0; --I) {
2254 if (!isa<CapturingScopeInfo>(FSI))
2256 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2266 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2267 (!
DSAStack->isClauseParsingMode() ||
2268 DSAStack->getParentDirective() != OMPD_unknown)) {
2269 auto &&Info =
DSAStack->isLoopControlVariable(D);
2271 (VD && VD->hasLocalStorage() &&
2272 isImplicitOrExplicitTaskingRegion(
DSAStack->getCurrentDirective())) ||
2273 (VD &&
DSAStack->isForceVarCapturing()))
2274 return VD ? VD : Info.second;
2275 DSAStackTy::DSAVarData DVarTop =
2277 if (DVarTop.CKind != OMPC_unknown &&
isOpenMPPrivate(DVarTop.CKind) &&
2278 (!VD || VD->hasLocalStorage() || !DVarTop.AppliedToPointee))
2279 return VD ? VD : cast<VarDecl>(DVarTop.PrivateCopy->getDecl());
2285 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2293 if (VD && !VD->hasLocalStorage() && DVarPrivate.CKind == OMPC_unknown &&
2294 ((
DSAStack->getDefaultDSA() != DSA_none &&
2295 DSAStack->getDefaultDSA() != DSA_firstprivate) ||
2296 DVarTop.CKind == OMPC_shared))
2298 if (DVarPrivate.CKind != OMPC_unknown ||
2299 (VD && (
DSAStack->getDefaultDSA() == DSA_none ||
2300 DSAStack->getDefaultDSA() == DSA_firstprivate)))
2301 return VD ? VD : cast<VarDecl>(DVarPrivate.PrivateCopy->getDecl());
2306 void Sema::adjustOpenMPTargetScopeIndex(
unsigned &FunctionScopesIndex,
2307 unsigned Level)
const {
2308 FunctionScopesIndex -= getOpenMPCaptureLevels(
DSAStack->getDirective(
Level));
2312 assert(LangOpts.OpenMP &&
"OpenMP must be enabled.");
2318 assert(LangOpts.OpenMP &&
"OpenMP must be enabled.");
2320 DSAStack->resetPossibleLoopCounter();
2326 unsigned CapLevel)
const {
2327 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2329 bool IsTriviallyCopyable =
2339 (IsTriviallyCopyable ||
2345 return OMPC_firstprivate;
2346 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(D,
Level);
2347 if (DVar.CKind != OMPC_shared &&
2348 !
DSAStack->isLoopControlVariable(D,
Level).first && !DVar.RefExpr) {
2350 return OMPC_firstprivate;
2356 DSAStack->resetPossibleLoopCounter(D);
2358 return OMPC_private;
2361 DSAStack->isLoopControlVariable(D).first) &&
2366 return OMPC_private;
2368 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
2374 return OMPC_private;
2380 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
2381 DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator)
2382 return OMPC_private;
2386 (
DSAStack->isClauseParsingMode() &&
2387 DSAStack->getClauseParsingMode() == OMPC_private) ||
2392 return K == OMPD_taskgroup ||
2393 ((isOpenMPParallelDirective(K) ||
2394 isOpenMPWorksharingDirective(K)) &&
2395 !isOpenMPSimdDirective(K));
2405 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2408 for (
unsigned I =
DSAStack->getNestingLevel() + 1; I >
Level; --I) {
2409 const unsigned NewLevel = I - 1;
2413 if (isOpenMPPrivate(K) && !AppliedToPointee) {
2421 if (
DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2431 if (
DSAStack->mustBeFirstprivateAtLevel(
2433 OMPC = OMPC_firstprivate;
2437 if (OMPC != OMPC_unknown)
2438 FD->addAttr(OMPCaptureKindAttr::CreateImplicit(Context,
unsigned(OMPC)));
2442 unsigned CaptureLevel)
const {
2443 assert(
LangOpts.OpenMP &&
"OpenMP is not allowed");
2448 const auto *VD = dyn_cast<VarDecl>(D);
2449 return VD && !VD->hasLocalStorage() &&
2452 Regions[CaptureLevel] != OMPD_task;
2456 unsigned CaptureLevel)
const {
2457 assert(
LangOpts.OpenMP &&
"OpenMP is not allowed");
2460 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
2461 if (!VD->hasLocalStorage()) {
2464 DSAStackTy::DSAVarData TopDVar =
2466 unsigned NumLevels =
2469 return (NumLevels == CaptureLevel + 1) && TopDVar.CKind != OMPC_shared;
2472 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(D,
Level);
2473 if (DVar.CKind != OMPC_shared)
2475 }
while (
Level > 0);
2481 void Sema::DestroyDataSharingAttributesStack() {
delete DSAStack; }
2485 OMPDeclareVariantScopes.push_back(OMPDeclareVariantScope(TI));
2490 "Not in OpenMP declare variant scope!");
2492 OMPDeclareVariantScopes.pop_back();
2498 assert(
LangOpts.OpenMP &&
"Expected OpenMP compilation mode.");
2503 (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host))
2506 if (!
LangOpts.OpenMPIsDevice && DevTy &&
2507 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost)
2510 DevTy = OMPDeclareTargetDeclAttr::getDeviceType(FD);
2511 if (
LangOpts.OpenMPIsDevice && DevTy &&
2512 *DevTy == OMPDeclareTargetDeclAttr::DT_Host) {
2514 StringRef HostDevTy =
2516 Diag(Loc, diag::err_omp_wrong_device_function_call) << HostDevTy << 0;
2517 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2518 diag::note_omp_marked_device_type_here)
2522 if (!
LangOpts.OpenMPIsDevice && !
LangOpts.OpenMPOffloadMandatory && DevTy &&
2523 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost) {
2526 OMPC_device_type, OMPC_DEVICE_TYPE_nohost);
2527 Diag(Loc, diag::err_omp_wrong_device_function_call) << NoHostDevTy << 1;
2528 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2529 diag::note_omp_marked_device_type_here)
2537 DSAStack->push(DKind, DirName, CurScope, Loc);
2547 DSAStack->setClauseParsingMode(OMPC_unknown);
2551 static std::pair<ValueDecl *, bool>
2553 SourceRange &ERange,
bool AllowArraySection =
false);
2558 bool InscanFound =
false;
2565 if (C->getClauseKind() != OMPC_reduction)
2567 auto *RC = cast<OMPReductionClause>(C);
2568 if (RC->getModifier() == OMPC_REDUCTION_inscan) {
2570 InscanLoc = RC->getModifierLoc();
2573 if (RC->getModifier() == OMPC_REDUCTION_task) {
2583 S.
Diag(RC->getModifierLoc(),
2584 diag::err_omp_reduction_task_not_parallel_or_worksharing);
2590 if (C->getClauseKind() != OMPC_reduction)
2592 auto *RC = cast<OMPReductionClause>(C);
2593 if (RC->getModifier() != OMPC_REDUCTION_inscan) {
2596 : RC->getModifierLoc(),
2597 diag::err_omp_inscan_reduction_expected);
2598 S.
Diag(InscanLoc, diag::note_omp_previous_inscan_reduction);
2601 for (
Expr *Ref : RC->varlists()) {
2602 assert(Ref &&
"NULL expr in OpenMP nontemporal clause.");
2605 Expr *SimpleRefExpr = Ref;
2612 S.
Diag(Ref->getExprLoc(),
2613 diag::err_omp_reduction_not_inclusive_exclusive)
2614 << Ref->getSourceRange();
2628 const DSAStackTy::DSAVarData &DVar,
2629 bool IsLoopIterVar =
false);
2637 if (
const auto *D = dyn_cast_or_null<OMPExecutableDirective>(CurDirective)) {
2639 if (
auto *Clause = dyn_cast<OMPLastprivateClause>(C)) {
2641 for (
Expr *DE : Clause->varlists()) {
2642 if (DE->isValueDependent() || DE->isTypeDependent()) {
2643 PrivateCopies.push_back(
nullptr);
2646 auto *DRE = cast<DeclRefExpr>(DE->IgnoreParens());
2647 auto *VD = cast<VarDecl>(DRE->getDecl());
2649 const DSAStackTy::DSAVarData DVar =
2651 if (DVar.CKind == OMPC_lastprivate) {
2658 *
this, DE->getExprLoc(),
Type.getUnqualifiedType(),
2662 PrivateCopies.push_back(
nullptr);
2666 *
this, VDPrivate, DE->
getType(), DE->getExprLoc()));
2670 PrivateCopies.push_back(
nullptr);
2673 Clause->setPrivateCopies(PrivateCopies);
2677 if (
auto *Clause = dyn_cast<OMPNontemporalClause>(C)) {
2679 for (
Expr *RefExpr : Clause->varlists()) {
2680 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
2683 Expr *SimpleRefExpr = RefExpr;
2687 PrivateRefs.push_back(RefExpr);
2692 const DSAStackTy::DSAVarData DVar =
2694 PrivateRefs.push_back(DVar.PrivateCopy ? DVar.PrivateCopy
2697 Clause->setPrivateRefs(PrivateRefs);
2700 if (
auto *Clause = dyn_cast<OMPUsesAllocatorsClause>(C)) {
2701 for (
unsigned I = 0, E = Clause->getNumberOfAllocators(); I < E; ++I) {
2707 if (!VD || !isa<VarDecl>(VD))
2709 DSAStackTy::DSAVarData DVar =
2715 Expr *MapExpr =
nullptr;
2717 DSAStack->checkMappableExprComponentListsForDecl(
2723 auto MI = MapExprComponents.rbegin();
2724 auto ME = MapExprComponents.rend();
2726 MI->getAssociatedDeclaration()->getCanonicalDecl() ==
2727 VD->getCanonicalDecl()) {
2728 MapExpr = MI->getAssociatedExpression();
2733 Diag(D.Allocator->getExprLoc(),
2734 diag::err_omp_allocator_used_in_clauses)
2739 Diag(MapExpr->getExprLoc(), diag::note_used_here)
2740 << MapExpr->getSourceRange();
2753 DiscardCleanupsInEvaluationContext();
2754 PopExpressionEvaluationContext();
2758 Expr *NumIterations,
Sema &SemaRef,
2759 Scope *S, DSAStackTy *Stack);
2768 explicit VarDeclFilterCCC(
Sema &S) : SemaRef(S) {}
2771 if (
const auto *VD = dyn_cast_or_null<VarDecl>(ND)) {
2772 return VD->hasGlobalStorage() &&
2779 std::unique_ptr<CorrectionCandidateCallback>
clone()
override {
2780 return std::make_unique<VarDeclFilterCCC>(*
this);
2789 explicit VarOrFuncDeclFilterCCC(
Sema &S) : SemaRef(S) {}
2792 if (ND && ((isa<VarDecl>(ND) && ND->
getKind() == Decl::Var) ||
2793 isa<FunctionDecl>(ND))) {
2800 std::unique_ptr<CorrectionCandidateCallback>
clone()
override {
2801 return std::make_unique<VarOrFuncDeclFilterCCC>(*
this);
2819 VarDeclFilterCCC CCC(*
this);
2825 ? diag::err_undeclared_var_use_suggest
2826 : diag::err_omp_expected_var_arg_suggest)
2828 VD = Corrected.getCorrectionDeclAs<
VarDecl>();
2830 Diag(
Id.getLoc(), Lookup.
empty() ? diag::err_undeclared_var_use
2831 : diag::err_omp_expected_var_arg)
2836 Diag(
Id.getLoc(), diag::err_omp_expected_var_arg) <<
Id.getName();
2845 Diag(
Id.getLoc(), diag::err_omp_global_var_arg)
2850 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
2862 Diag(
Id.getLoc(), diag::err_omp_var_scope)
2863 << getOpenMPDirectiveName(
Kind) << VD;
2867 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
2877 Diag(
Id.getLoc(), diag::err_omp_var_scope)
2878 << getOpenMPDirectiveName(
Kind) << VD;
2882 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
2893 Diag(
Id.getLoc(), diag::err_omp_var_scope)
2894 << getOpenMPDirectiveName(
Kind) << VD;
2898 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
2907 Diag(
Id.getLoc(), diag::err_omp_var_scope)
2908 << getOpenMPDirectiveName(
Kind) << VD;
2912 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
2920 if (
Kind == OMPD_threadprivate && VD->
isUsed() &&
2922 Diag(
Id.getLoc(), diag::err_omp_var_used)
2923 << getOpenMPDirectiveName(
Kind) << VD;
2945 class LocalVarRefChecker final
2951 if (
const auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
2952 if (VD->hasLocalStorage()) {
2954 diag::err_omp_local_var_in_threadprivate_init)
2956 SemaRef.Diag(VD->
getLocation(), diag::note_defined_here)
2963 bool VisitStmt(
const Stmt *S) {
2964 for (
const Stmt *Child : S->children()) {
2965 if (Child && Visit(Child))
2970 explicit LocalVarRefChecker(
Sema &SemaRef) : SemaRef(SemaRef) {}
2977 for (
Expr *RefExpr : VarList) {
2978 auto *DE = cast<DeclRefExpr>(RefExpr);
2979 auto *VD = cast<VarDecl>(DE->getDecl());
2996 diag::err_omp_threadprivate_incomplete_type)) {
3003 Diag(ILoc, diag::err_omp_ref_type_arg)
3004 << getOpenMPDirectiveName(OMPD_threadprivate) << VD->
getType();
3008 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3016 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
3020 !VD->isLocalVarDecl())) {
3021 Diag(ILoc, diag::err_omp_var_thread_local)
3026 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3033 if (
const Expr *Init = VD->getAnyInitializer()) {
3034 LocalVarRefChecker Checker(*
this);
3035 if (Checker.Visit(Init))
3039 Vars.push_back(RefExpr);
3040 DSAStack->addDSA(VD, DE, OMPC_threadprivate);
3041 VD->
addAttr(OMPThreadPrivateDeclAttr::CreateImplicit(
3044 ML->DeclarationMarkedOpenMPThreadPrivate(VD);
3047 if (!Vars.empty()) {
3055 static OMPAllocateDeclAttr::AllocatorTypeTy
3058 return OMPAllocateDeclAttr::OMPNullMemAlloc;
3059 if (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3060 Allocator->isInstantiationDependent() ||
3061 Allocator->containsUnexpandedParameterPack())
3062 return OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3063 auto AllocatorKindRes = OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3064 const Expr *AE = Allocator->IgnoreParenImpCasts();
3065 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
3066 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
3067 const Expr *DefAllocator = Stack->getAllocator(AllocatorKind);
3068 llvm::FoldingSetNodeID AEId, DAEId;
3071 if (AEId == DAEId) {
3072 AllocatorKindRes = AllocatorKind;
3076 return AllocatorKindRes;
3081 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
Expr *Allocator) {
3082 if (!VD->
hasAttr<OMPAllocateDeclAttr>())
3084 const auto *A = VD->
getAttr<OMPAllocateDeclAttr>();
3085 Expr *PrevAllocator = A->getAllocator();
3086 OMPAllocateDeclAttr::AllocatorTypeTy PrevAllocatorKind =
3088 bool AllocatorsMatch = AllocatorKind == PrevAllocatorKind;
3089 if (AllocatorsMatch &&
3090 AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc &&
3091 Allocator && PrevAllocator) {
3092 const Expr *AE = Allocator->IgnoreParenImpCasts();
3094 llvm::FoldingSetNodeID AEId, PAEId;
3097 AllocatorsMatch = AEId == PAEId;
3099 if (!AllocatorsMatch) {
3101 llvm::raw_svector_ostream AllocatorStream(AllocatorBuffer);
3105 llvm::raw_svector_ostream PrevAllocatorStream(PrevAllocatorBuffer);
3107 PrevAllocator->printPretty(PrevAllocatorStream,
nullptr,
3111 Allocator ? Allocator->getExprLoc() : RefExpr->
getExprLoc();
3113 Allocator ? Allocator->getSourceRange() : RefExpr->
getSourceRange();
3115 PrevAllocator ? PrevAllocator->getExprLoc() : A->getLocation();
3117 PrevAllocator ? PrevAllocator->getSourceRange() : A->getRange();
3118 S.
Diag(AllocatorLoc, diag::warn_omp_used_different_allocator)
3119 << (Allocator ? 1 : 0) << AllocatorStream.str()
3120 << (PrevAllocator ? 1 : 0) << PrevAllocatorStream.str()
3122 S.
Diag(PrevAllocatorLoc, diag::note_omp_previous_allocator)
3123 << PrevAllocatorRange;
3131 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
3133 if (VD->
hasAttr<OMPAllocateDeclAttr>())
3142 (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3143 Allocator->isInstantiationDependent() ||
3144 Allocator->containsUnexpandedParameterPack()))
3146 auto *A = OMPAllocateDeclAttr::CreateImplicit(S.
Context, AllocatorKind,
3147 Allocator, Alignment, SR);
3150 ML->DeclarationMarkedOpenMPAllocate(VD, A);
3157 assert(Clauses.size() <= 2 &&
"Expected at most two clauses.");
3158 Expr *Alignment =
nullptr;
3159 Expr *Allocator =
nullptr;
3160 if (Clauses.empty()) {
3167 targetDiag(Loc, diag::err_expected_allocator_clause);
3170 if (
const auto *AC = dyn_cast<OMPAllocatorClause>(C))
3171 Allocator = AC->getAllocator();
3172 else if (
const auto *AC = dyn_cast<OMPAlignClause>(C))
3173 Alignment = AC->getAlignment();
3175 llvm_unreachable(
"Unexpected clause on allocate directive");
3177 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
3180 for (
Expr *RefExpr : VarList) {
3181 auto *DE = cast<DeclRefExpr>(RefExpr);
3182 auto *VD = cast<VarDecl>(DE->getDecl());
3186 VD->
hasAttr<OMPThreadPrivateDeclAttr>() ||
3188 !VD->isLocalVarDecl()))
3194 AllocatorKind, Allocator))
3201 if (Allocator && VD->hasGlobalStorage()) {
3202 if (AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc) {
3203 Diag(Allocator->getExprLoc(),
3204 diag::err_omp_expected_predefined_allocator)
3205 << Allocator->getSourceRange();
3206 bool IsDecl = VD->isThisDeclarationADefinition(
Context) ==
3209 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3215 Vars.push_back(RefExpr);
3234 Diag(Loc, diag::err_omp_invalid_scope) <<
"requires";
3248 bool SkippedClauses) {
3249 if (!SkippedClauses && Assumptions.empty())
3250 Diag(Loc, diag::err_omp_no_clause_for_directive)
3251 << llvm::omp::getAllAssumeClauseOptions()
3252 << llvm::omp::getOpenMPDirectiveName(DKind);
3254 auto *AA = AssumptionAttr::Create(
Context, llvm::join(Assumptions,
","), Loc);
3255 if (DKind == llvm::omp::Directive::OMPD_begin_assumes) {
3256 OMPAssumeScoped.push_back(AA);
3261 if (Assumptions.empty())
3264 assert(DKind == llvm::omp::Directive::OMPD_assumes &&
3265 "Unexpected omp assumption directive!");
3266 OMPAssumeGlobal.push_back(AA);
3274 while (Ctx->getLexicalParent())
3276 DeclContexts.push_back(Ctx);
3277 while (!DeclContexts.empty()) {
3279 for (
auto *SubDC : DC->
decls()) {
3280 if (SubDC->isInvalidDecl())
3282 if (
auto *CTD = dyn_cast<ClassTemplateDecl>(SubDC)) {
3283 DeclContexts.push_back(CTD->getTemplatedDecl());
3284 llvm::append_range(DeclContexts, CTD->specializations());
3287 if (
auto *DC = dyn_cast<DeclContext>(SubDC))
3288 DeclContexts.push_back(DC);
3289 if (
auto *F = dyn_cast<FunctionDecl>(SubDC)) {
3299 OMPAssumeScoped.pop_back();
3308 DSAStack->getEncounteredTargetLocs();
3310 if (!TargetLocations.empty() || !AtomicLoc.
isInvalid()) {
3311 for (
const OMPClause *CNew : ClauseList) {
3313 if (isa<OMPUnifiedSharedMemoryClause>(CNew) ||
3314 isa<OMPUnifiedAddressClause>(CNew) ||
3315 isa<OMPReverseOffloadClause>(CNew) ||
3316 isa<OMPDynamicAllocatorsClause>(CNew)) {
3317 Diag(Loc, diag::err_omp_directive_before_requires)
3318 <<
"target" << getOpenMPClauseName(CNew->getClauseKind());
3320 Diag(TargetLoc, diag::note_omp_requires_encountered_directive)
3324 isa<OMPAtomicDefaultMemOrderClause>(CNew)) {
3325 Diag(Loc, diag::err_omp_directive_before_requires)
3326 <<
"atomic" << getOpenMPClauseName(CNew->getClauseKind());
3327 Diag(AtomicLoc, diag::note_omp_requires_encountered_directive)
3333 if (!
DSAStack->hasDuplicateRequiresClause(ClauseList))
3341 const DSAStackTy::DSAVarData &DVar,
3342 bool IsLoopIterVar) {
3344 SemaRef.
Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_explicit_dsa)
3345 << getOpenMPClauseName(DVar.CKind);
3349 PDSA_StaticMemberShared,
3350 PDSA_StaticLocalVarShared,
3351 PDSA_LoopIterVarPrivate,
3352 PDSA_LoopIterVarLinear,
3353 PDSA_LoopIterVarLastprivate,
3354 PDSA_ConstVarShared,
3355 PDSA_GlobalVarShared,
3356 PDSA_TaskVarFirstprivate,
3357 PDSA_LocalVarPrivate,
3359 } Reason = PDSA_Implicit;
3360 bool ReportHint =
false;
3362 auto *VD = dyn_cast<VarDecl>(D);
3363 if (IsLoopIterVar) {
3364 if (DVar.CKind == OMPC_private)
3365 Reason = PDSA_LoopIterVarPrivate;
3366 else if (DVar.CKind == OMPC_lastprivate)
3367 Reason = PDSA_LoopIterVarLastprivate;
3369 Reason = PDSA_LoopIterVarLinear;
3371 DVar.CKind == OMPC_firstprivate) {
3372 Reason = PDSA_TaskVarFirstprivate;
3373 ReportLoc = DVar.ImplicitDSALoc;
3374 }
else if (VD && VD->isStaticLocal())
3375 Reason = PDSA_StaticLocalVarShared;
3376 else if (VD && VD->isStaticDataMember())
3377 Reason = PDSA_StaticMemberShared;
3378 else if (VD && VD->isFileVarDecl())
3379 Reason = PDSA_GlobalVarShared;
3381 Reason = PDSA_ConstVarShared;
3382 else if (VD && VD->isLocalVarDecl() && DVar.CKind == OMPC_private) {
3384 Reason = PDSA_LocalVarPrivate;
3386 if (Reason != PDSA_Implicit) {
3387 SemaRef.
Diag(ReportLoc, diag::note_omp_predetermined_dsa)
3388 << Reason << ReportHint
3389 << getOpenMPDirectiveName(Stack->getCurrentDirective());
3390 }
else if (DVar.ImplicitDSALoc.isValid()) {
3391 SemaRef.
Diag(DVar.ImplicitDSALoc, diag::note_omp_implicit_dsa)
3392 << getOpenMPClauseName(DVar.CKind);
3398 bool IsAggregateOrDeclareTarget) {
3401 case OMPC_DEFAULTMAP_MODIFIER_alloc:
3402 Kind = OMPC_MAP_alloc;
3404 case OMPC_DEFAULTMAP_MODIFIER_to:
3407 case OMPC_DEFAULTMAP_MODIFIER_from:
3408 Kind = OMPC_MAP_from;
3410 case OMPC_DEFAULTMAP_MODIFIER_tofrom:
3411 Kind = OMPC_MAP_tofrom;
3413 case OMPC_DEFAULTMAP_MODIFIER_present:
3419 Kind = OMPC_MAP_alloc;
3421 case OMPC_DEFAULTMAP_MODIFIER_firstprivate:
3423 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3424 case OMPC_DEFAULTMAP_MODIFIER_none:
3425 case OMPC_DEFAULTMAP_MODIFIER_default:
3430 if (IsAggregateOrDeclareTarget) {
3431 Kind = OMPC_MAP_tofrom;
3434 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3441 class DSAAttrChecker final :
public StmtVisitor<DSAAttrChecker, void> {
3444 bool ErrorFound =
false;
3445 bool TryCaptureCXXThisMembers =
false;
3447 const static unsigned DefaultmapKindNum = OMPC_DEFAULTMAP_pointer + 1;
3451 ImplicitMapModifier[DefaultmapKindNum];
3453 llvm::SmallDenseSet<const ValueDecl *, 4> ImplicitDeclarations;
3457 if (!S->hasAssociatedStmt() || !S->getAssociatedStmt())
3459 if (S->getDirectiveKind() == OMPD_atomic ||
3460 S->getDirectiveKind() == OMPD_critical ||
3461 S->getDirectiveKind() == OMPD_section ||
3462 S->getDirectiveKind() == OMPD_master ||
3463 S->getDirectiveKind() == OMPD_masked ||
3465 Visit(S->getAssociatedStmt());
3468 visitSubCaptures(S->getInnermostCapturedStmt());
3471 if (TryCaptureCXXThisMembers ||
3473 llvm::any_of(S->getInnermostCapturedStmt()->captures(),
3475 return C.capturesThis();
3477 bool SavedTryCaptureCXXThisMembers = TryCaptureCXXThisMembers;
3478 TryCaptureCXXThisMembers =
true;
3479 Visit(S->getInnermostCapturedStmt()->getCapturedStmt());
3480 TryCaptureCXXThisMembers = SavedTryCaptureCXXThisMembers;
3487 if (
auto *FC = dyn_cast<OMPFirstprivateClause>(C)) {
3488 for (
Expr *Ref : FC->varlists())
3500 if (
auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
3503 !Stack->getTopDSA(VD,
false).RefExpr)) {
3504 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(VD))
3505 if (!CED->hasAttr<OMPCaptureNoInitAttr>()) {
3506 Visit(CED->getInit());
3509 }
else if (VD->
isImplicit() || isa<OMPCapturedExprDecl>(VD))
3516 !Stack->isImplicitTaskFirstprivate(VD))
3519 if (Stack->isUsesAllocatorsDecl(VD).hasValue())
3522 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
3524 if (DVar.RefExpr || !ImplicitDeclarations.insert(VD).second)
3529 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
3532 !Res || *Res != OMPDeclareTargetDeclAttr::MT_Link) &&
3533 !Stack->isImplicitTaskFirstprivate(VD))
3542 if (DVar.CKind == OMPC_unknown &&
3543 (Stack->getDefaultDSA() == DSA_none ||
3544 Stack->getDefaultDSA() == DSA_firstprivate) &&
3545 isImplicitOrExplicitTaskingRegion(DKind) &&
3546 VarsWithInheritedDSA.count(VD) == 0) {
3547 bool InheritedDSA = Stack->getDefaultDSA() == DSA_none;
3548 if (!InheritedDSA && Stack->getDefaultDSA() == DSA_firstprivate) {
3549 DSAStackTy::DSAVarData DVar =
3550 Stack->getImplicitDSA(VD,
false);
3551 InheritedDSA = DVar.CKind == OMPC_unknown;
3554 VarsWithInheritedDSA[VD] = E;
3569 bool IsModifierNone = Stack->getDefaultmapModifier(ClauseKind) ==
3570 OMPC_DEFAULTMAP_MODIFIER_none;
3571 if (DVar.CKind == OMPC_unknown && IsModifierNone &&
3572 VarsWithInheritedDSA.count(VD) == 0 && !Res) {
3576 if (!Stack->checkMappableExprComponentListsForDecl(
3581 auto MI = MapExprComponents.rbegin();
3582 auto ME = MapExprComponents.rend();
3583 return MI != ME && MI->getAssociatedDeclaration() == VD;
3585 VarsWithInheritedDSA[VD] = E;
3591 bool IsModifierPresent = Stack->getDefaultmapModifier(ClauseKind) ==
3592 OMPC_DEFAULTMAP_MODIFIER_present;
3593 if (IsModifierPresent) {
3594 if (llvm::find(ImplicitMapModifier[ClauseKind],
3595 OMPC_MAP_MODIFIER_present) ==
3596 std::end(ImplicitMapModifier[ClauseKind])) {
3597 ImplicitMapModifier[ClauseKind].push_back(
3598 OMPC_MAP_MODIFIER_present);
3604 !Stack->isLoopControlVariable(VD).first) {
3605 if (!Stack->checkMappableExprComponentListsForDecl(
3610 if (SemaRef.LangOpts.OpenMP >= 50)
3611 return !StackComponents.empty();
3614 return StackComponents.size() == 1 ||
3616 std::next(StackComponents.rbegin()),
3617 StackComponents.rend(),
3618 [](const OMPClauseMappableExprCommon::
3619 MappableComponent &MC) {
3620 return MC.getAssociatedDeclaration() ==
3622 (isa<OMPArraySectionExpr>(
3623 MC.getAssociatedExpression()) ||
3624 isa<OMPArrayShapingExpr>(
3625 MC.getAssociatedExpression()) ||
3626 isa<ArraySubscriptExpr>(
3627 MC.getAssociatedExpression()));
3630 bool IsFirstprivate =
false;
3632 if (
const auto *RD =
3634 IsFirstprivate = RD->isLambda();
3636 IsFirstprivate || (Stack->mustBeFirstprivate(ClauseKind) && !Res);
3637 if (IsFirstprivate) {
3638 ImplicitFirstprivate.emplace_back(E);
3641 Stack->getDefaultmapModifier(ClauseKind);
3643 M, ClauseKind == OMPC_DEFAULTMAP_aggregate || Res);
3644 ImplicitMap[ClauseKind][
Kind].emplace_back(E);
3654 DVar = Stack->hasInnermostDSA(
3657 return C == OMPC_reduction && !AppliedToPointee;
3666 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
3672 DVar = Stack->getImplicitDSA(VD,
false);
3674 (Stack->getDefaultDSA() == DSA_firstprivate &&
3675 DVar.CKind == OMPC_firstprivate && !DVar.RefExpr)) &&
3676 !Stack->isLoopControlVariable(VD).first) {
3677 ImplicitFirstprivate.push_back(E);
3684 *Res == OMPDeclareTargetDeclAttr::MT_Link) {
3685 Stack->addToParentTargetRegionLinkGlobals(E);
3699 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(FD,
false);
3702 if (DVar.RefExpr || !ImplicitDeclarations.insert(FD).second)
3706 !Stack->isLoopControlVariable(FD).first &&
3707 !Stack->checkMappableExprComponentListsForDecl(
3712 return isa<CXXThisExpr>(
3714 StackComponents.back().getAssociatedExpression())
3721 if (FD->isBitField())
3726 if (Stack->isClassPreviouslyMapped(TE->getType()))
3730 Stack->getDefaultmapModifier(OMPC_DEFAULTMAP_aggregate);
3735 ImplicitMap[ClauseKind][
Kind].emplace_back(E);
3744 DVar = Stack->hasInnermostDSA(
3747 return C == OMPC_reduction && !AppliedToPointee;
3756 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
3762 DVar = Stack->getImplicitDSA(FD,
false);
3764 !Stack->isLoopControlVariable(FD).first) {
3769 if (DVar.CKind != OMPC_unknown)
3770 ImplicitFirstprivate.push_back(E);
3777 Stack->getCurrentDirective(),
3780 const auto *VD = cast<ValueDecl>(
3781 CurComponents.back().getAssociatedDeclaration()->getCanonicalDecl());
3782 if (!Stack->checkMappableExprComponentListsForDecl(
3788 auto CCI = CurComponents.rbegin();
3789 auto CCE = CurComponents.rend();
3790 for (const auto &SC : llvm::reverse(StackComponents)) {
3792 if (CCI->getAssociatedExpression()->getStmtClass() !=
3793 SC.getAssociatedExpression()->getStmtClass())
3794 if (!((isa<OMPArraySectionExpr>(
3795 SC.getAssociatedExpression()) ||
3796 isa<OMPArrayShapingExpr>(
3797 SC.getAssociatedExpression())) &&
3798 isa<ArraySubscriptExpr>(
3799 CCI->getAssociatedExpression())))
3802 const Decl *CCD = CCI->getAssociatedDeclaration();
3803 const Decl *SCD = SC.getAssociatedDeclaration();
3804 CCD = CCD ? CCD->getCanonicalDecl() : nullptr;
3805 SCD = SCD ? SCD->getCanonicalDecl() : nullptr;
3808 std::advance(CCI, 1);
3816 }
else if (!TryCaptureCXXThisMembers) {
3824 if (isa_and_nonnull<OMPPrivateClause>(C))
3830 if (C && !((isa<OMPFirstprivateClause>(C) || isa<OMPMapClause>(C)) &&
3833 for (
Stmt *CC :
C->children()) {
3840 VisitSubCaptures(S);
3849 for (
Stmt *C : S->arguments()) {
3856 if (
Expr *Callee = S->getCallee())
3857 if (
auto *CE = dyn_cast<MemberExpr>(
Callee->IgnoreParenImpCasts()))
3858 Visit(CE->getBase());
3860 void VisitStmt(
Stmt *S) {
3861 for (
Stmt *C : S->children()) {
3872 if (!Cap.capturesVariable() && !Cap.capturesVariableByCopy())
3874 VarDecl *VD = Cap.getCapturedVar();
3878 Stack->checkMappableExprComponentListsForDecl(
3885 Cap.getLocation(),
true);
3889 bool isErrorFound()
const {
return ErrorFound; }
3891 return ImplicitFirstprivate;
3895 return ImplicitMap[DK][MK];
3899 return ImplicitMapModifier[
Kind];
3902 return VarsWithInheritedDSA;
3906 : Stack(S), SemaRef(SemaRef), ErrorFound(
false), CS(CS) {
3921 Traits.emplace_back(llvm::omp::TraitProperty::construct_target_target);
3923 Traits.emplace_back(llvm::omp::TraitProperty::construct_teams_teams);
3925 Traits.emplace_back(llvm::omp::TraitProperty::construct_parallel_parallel);
3927 Traits.emplace_back(llvm::omp::TraitProperty::construct_for_for);
3929 Traits.emplace_back(llvm::omp::TraitProperty::construct_simd_simd);
3930 Stack->handleConstructTrait(Traits, ScopeEntry);
3936 case OMPD_parallel_for:
3937 case OMPD_parallel_for_simd:
3938 case OMPD_parallel_sections:
3939 case OMPD_parallel_master:
3940 case OMPD_parallel_loop:
3942 case OMPD_teams_distribute:
3943 case OMPD_teams_distribute_simd: {
3948 std::make_pair(
".global_tid.", KmpInt32PtrTy),
3949 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
3950 std::make_pair(StringRef(),
QualType())
3956 case OMPD_target_teams:
3957 case OMPD_target_parallel:
3958 case OMPD_target_parallel_for:
3959 case OMPD_target_parallel_for_simd:
3960 case OMPD_target_teams_loop:
3961 case OMPD_target_parallel_loop:
3962 case OMPD_target_teams_distribute:
3963 case OMPD_target_teams_distribute_simd: {
3973 std::make_pair(
".global_tid.", KmpInt32Ty),
3974 std::make_pair(
".part_id.", KmpInt32PtrTy),
3975 std::make_pair(
".privates.", VoidPtrTy),
3980 std::make_pair(StringRef(),
QualType())
3987 AlwaysInlineAttr::CreateImplicit(
3989 AlwaysInlineAttr::Keyword_forceinline));
3991 std::make_pair(StringRef(),
QualType())
3997 std::make_pair(
".global_tid.", KmpInt32PtrTy),
3998 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
3999 std::make_pair(StringRef(),
QualType())
4004 ParamsTeamsOrParallel, 2);
4008 case OMPD_target_simd: {
4018 std::make_pair(
".global_tid.", KmpInt32Ty),
4019 std::make_pair(
".part_id.", KmpInt32PtrTy),
4020 std::make_pair(
".privates.", VoidPtrTy),
4025 std::make_pair(StringRef(),
QualType())
4032 AlwaysInlineAttr::CreateImplicit(
4034 AlwaysInlineAttr::Keyword_forceinline));
4036 std::make_pair(StringRef(),
QualType()),
4056 case OMPD_taskgroup:
4057 case OMPD_distribute:
4058 case OMPD_distribute_simd:
4060 case OMPD_target_data:
4061 case OMPD_dispatch: {
4063 std::make_pair(StringRef(),
QualType())
4079 std::make_pair(
".global_tid.", KmpInt32Ty),
4080 std::make_pair(
".part_id.", KmpInt32PtrTy),
4081 std::make_pair(
".privates.", VoidPtrTy),
4086 std::make_pair(StringRef(),
QualType())
4093 AlwaysInlineAttr::CreateImplicit(
4095 AlwaysInlineAttr::Keyword_forceinline));
4099 case OMPD_taskloop_simd:
4100 case OMPD_master_taskloop:
4101 case OMPD_master_taskloop_simd: {
4119 std::make_pair(
".global_tid.", KmpInt32Ty),
4120 std::make_pair(
".part_id.", KmpInt32PtrTy),
4121 std::make_pair(
".privates.", VoidPtrTy),
4126 std::make_pair(
".lb.", KmpUInt64Ty),
4127 std::make_pair(
".ub.", KmpUInt64Ty),
4128 std::make_pair(
".st.", KmpInt64Ty),
4129 std::make_pair(
".liter.", KmpInt32Ty),
4130 std::make_pair(
".reductions.", VoidPtrTy),
4131 std::make_pair(StringRef(),
QualType())
4138 AlwaysInlineAttr::CreateImplicit(
4140 AlwaysInlineAttr::Keyword_forceinline));
4143 case OMPD_parallel_master_taskloop:
4144 case OMPD_parallel_master_taskloop_simd: {
4158 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4159 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4160 std::make_pair(StringRef(),
QualType())
4170 std::make_pair(
".global_tid.", KmpInt32Ty),
4171 std::make_pair(
".part_id.", KmpInt32PtrTy),
4172 std::make_pair(
".privates.", VoidPtrTy),
4177 std::make_pair(
".lb.", KmpUInt64Ty),
4178 std::make_pair(
".ub.", KmpUInt64Ty),
4179 std::make_pair(
".st.", KmpInt64Ty),
4180 std::make_pair(
".liter.", KmpInt32Ty),
4181 std::make_pair(
".reductions.", VoidPtrTy),
4182 std::make_pair(StringRef(),
QualType())
4189 AlwaysInlineAttr::CreateImplicit(
4191 AlwaysInlineAttr::Keyword_forceinline));
4194 case OMPD_distribute_parallel_for_simd:
4195 case OMPD_distribute_parallel_for: {
4200 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4201 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4204 std::make_pair(StringRef(),
QualType())
4210 case OMPD_target_teams_distribute_parallel_for:
4211 case OMPD_target_teams_distribute_parallel_for_simd: {
4222 std::make_pair(
".global_tid.", KmpInt32Ty),
4223 std::make_pair(
".part_id.", KmpInt32PtrTy),
4224 std::make_pair(
".privates.", VoidPtrTy),
4229 std::make_pair(StringRef(),
QualType())
4236 AlwaysInlineAttr::CreateImplicit(
4238 AlwaysInlineAttr::Keyword_forceinline));
4240 std::make_pair(StringRef(),
QualType())
4247 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4248 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4249 std::make_pair(StringRef(),
QualType())
4256 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4257 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4260 std::make_pair(StringRef(),
QualType())
4269 case OMPD_teams_loop: {
4275 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4276 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4277 std::make_pair(StringRef(),
QualType())
4285 case OMPD_teams_distribute_parallel_for:
4286 case OMPD_teams_distribute_parallel_for_simd: {
4292 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4293 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4294 std::make_pair(StringRef(),
QualType())
4301 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4302 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4305 std::make_pair(StringRef(),
QualType())
4313 case OMPD_target_update:
4314 case OMPD_target_enter_data:
4315 case OMPD_target_exit_data: {
4325 std::make_pair(
".global_tid.", KmpInt32Ty),
4326 std::make_pair(
".part_id.", KmpInt32PtrTy),
4327 std::make_pair(
".privates.", VoidPtrTy),
4332 std::make_pair(StringRef(),
QualType())
4339 AlwaysInlineAttr::CreateImplicit(
4341 AlwaysInlineAttr::Keyword_forceinline));
4344 case OMPD_threadprivate:
4346 case OMPD_taskyield:
4349 case OMPD_cancellation_point:
4354 case OMPD_declare_reduction:
4355 case OMPD_declare_mapper:
4356 case OMPD_declare_simd:
4357 case OMPD_declare_target:
4358 case OMPD_end_declare_target:
4360 case OMPD_declare_variant:
4361 case OMPD_begin_declare_variant:
4362 case OMPD_end_declare_variant:
4363 case OMPD_metadirective:
4364 llvm_unreachable(
"OpenMP Directive is not allowed");
4367 llvm_unreachable(
"Unknown OpenMP directive");
4373 int Sema::getNumberOfConstructScopes(
unsigned Level)
const {
4380 return CaptureRegions.size();
4384 Expr *CaptureExpr,
bool WithInit,
4385 bool AsExpression) {
4386 assert(CaptureExpr);
4392 Ty = C.getLValueReferenceType(Ty);
4394 Ty = C.getPointerType(Ty);
4406 CED->addAttr(OMPCaptureNoInitAttr::CreateImplicit(C));
4417 CD = cast<OMPCapturedExprDecl>(VD);
4454 class CaptureRegionUnwinderRAII {
4461 CaptureRegionUnwinderRAII(
Sema &S,
bool &ErrorFound,
4463 : S(S), ErrorFound(ErrorFound), DKind(DKind) {}
4464 ~CaptureRegionUnwinderRAII() {
4467 while (--ThisCaptureLevel >= 0)
4480 DSAStack->getCurrentDirective()))) {
4482 if (
const auto *RD =
Type.getCanonicalType()
4483 .getNonReferenceType()
4485 bool SavedForceCaptureByReferenceInTargetExecutable =
4486 DSAStack->isForceCaptureByReferenceInTargetExecutable();
4487 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4489 if (RD->isLambda()) {
4490 llvm::DenseMap<const VarDecl *, FieldDecl *> Captures;
4492 RD->getCaptureFields(Captures, ThisCapture);
4495 VarDecl *VD = LC.getCapturedVar();
4500 }
else if (LC.getCaptureKind() ==
LCK_This) {
4508 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4509 SavedForceCaptureByReferenceInTargetExecutable);
4519 for (
const OMPClause *Clause : Clauses) {
4521 Ordered = cast<OMPOrderedClause>(Clause);
4523 Order = cast<OMPOrderClause>(Clause);
4524 if (Order->
getKind() != OMPC_ORDER_concurrent)
4527 if (Ordered && Order)
4531 if (Ordered && Order) {
4533 diag::err_omp_simple_clause_incompatible_with_ordered)
4534 << getOpenMPClauseName(OMPC_order)
4548 if (
DSAStack->getCurrentDirective() == OMPD_atomic ||
4549 DSAStack->getCurrentDirective() == OMPD_critical ||
4550 DSAStack->getCurrentDirective() == OMPD_section ||
4551 DSAStack->getCurrentDirective() == OMPD_master ||
4552 DSAStack->getCurrentDirective() == OMPD_masked)
4555 bool ErrorFound =
false;
4556 CaptureRegionUnwinderRAII CaptureRegionUnwinder(
4557 *
this, ErrorFound,
DSAStack->getCurrentDirective());
4558 if (!S.isUsable()) {
4576 auto *IRC = cast<OMPInReductionClause>(Clause);
4577 for (
Expr *E : IRC->taskgroup_descriptors())
4589 if (
auto *E = cast_or_null<Expr>(VarRef)) {
4593 DSAStack->setForceVarCapturing(
false);
4595 DSAStack->getCurrentDirective())) {
4596 assert(CaptureRegions.empty() &&
4597 "No captured regions in loop transformation directives.");
4598 }
else if (CaptureRegions.size() > 1 ||
4599 CaptureRegions.back() != OMPD_unknown) {
4603 if (
Expr *E = C->getPostUpdateExpr())
4608 SC = cast<OMPScheduleClause>(Clause);
4610 OC = cast<OMPOrderedClause>(Clause);
4612 LCs.push_back(cast<OMPLinearClause>(Clause));
4623 OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
4628 diag::err_omp_simple_clause_incompatible_with_ordered)
4629 << getOpenMPClauseName(OMPC_schedule)
4631 OMPC_SCHEDULE_MODIFIER_nonmonotonic)
4642 Diag(C->getBeginLoc(), diag::err_omp_linear_ordered)
4651 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
4658 unsigned CompletedRegions = 0;
4663 if (ThisCaptureRegion != OMPD_unknown) {
4671 if (CaptureRegion == ThisCaptureRegion ||
4672 CaptureRegion == OMPD_unknown) {
4673 if (
auto *DS = cast_or_null<DeclStmt>(C->getPreInitStmt())) {
4674 for (
Decl *D : DS->decls())
4680 if (ThisCaptureRegion == OMPD_target) {
4684 if (
const auto *UAC = dyn_cast<OMPUsesAllocatorsClause>(C)) {
4685 for (
unsigned I = 0,
End = UAC->getNumberOfAllocators(); I <
End;
4695 if (ThisCaptureRegion == OMPD_parallel) {
4699 if (
auto *RC = dyn_cast<OMPReductionClause>(C)) {
4700 if (RC->getModifier() != OMPC_REDUCTION_inscan)
4702 for (
Expr *E : RC->copy_array_temps())
4705 if (
auto *AC = dyn_cast<OMPAlignedClause>(C)) {
4706 for (
Expr *E : AC->varlists())
4711 if (++CompletedRegions == CaptureRegions.size())
4722 if (CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_cancellation_point)
4725 if (CancelRegion == OMPD_parallel || CancelRegion == OMPD_for ||
4726 CancelRegion == OMPD_sections || CancelRegion == OMPD_taskgroup)
4729 SemaRef.
Diag(StartLoc, diag::err_omp_wrong_cancel_region)
4730 << getOpenMPDirectiveName(CancelRegion);
4740 if (Stack->getCurScope()) {
4743 bool NestingProhibited =
false;
4744 bool CloseNesting =
true;
4745 bool OrphanSeen =
false;
4748 ShouldBeInParallelRegion,
4749 ShouldBeInOrderedRegion,
4750 ShouldBeInTargetRegion,
4751 ShouldBeInTeamsRegion,
4752 ShouldBeInLoopSimdRegion,
4753 } Recommend = NoRecommend;
4755 ((SemaRef.
LangOpts.OpenMP <= 45 && CurrentRegion != OMPD_ordered) ||
4756 (SemaRef.
LangOpts.OpenMP >= 50 && CurrentRegion != OMPD_ordered &&
4757 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_atomic &&
4758 CurrentRegion != OMPD_scan))) {
4771 SemaRef.
Diag(StartLoc, (CurrentRegion != OMPD_simd)
4772 ? diag::err_omp_prohibited_region_simd
4773 : diag::warn_omp_nesting_simd)
4774 << (SemaRef.
LangOpts.OpenMP >= 50 ? 1 : 0);
4775 return CurrentRegion != OMPD_simd;
4777 if (ParentRegion == OMPD_atomic) {
4780 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_atomic);
4783 if (CurrentRegion == OMPD_section) {
4788 if (ParentRegion != OMPD_sections &&
4789 ParentRegion != OMPD_parallel_sections) {
4790 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_section_directive)
4791 << (ParentRegion != OMPD_unknown)
4792 << getOpenMPDirectiveName(ParentRegion);
4800 if (ParentRegion == OMPD_unknown &&
4802 CurrentRegion != OMPD_cancellation_point &&
4803 CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_scan)
4805 if (CurrentRegion == OMPD_cancellation_point ||
4806 CurrentRegion == OMPD_cancel) {
4819 !((CancelRegion == OMPD_parallel &&
4820 (ParentRegion == OMPD_parallel ||
4821 ParentRegion == OMPD_target_parallel)) ||
4822 (CancelRegion == OMPD_for &&
4823 (ParentRegion == OMPD_for || ParentRegion == OMPD_parallel_for ||
4824 ParentRegion == OMPD_target_parallel_for ||
4825 ParentRegion == OMPD_distribute_parallel_for ||
4826 ParentRegion == OMPD_teams_distribute_parallel_for ||
4827 ParentRegion == OMPD_target_teams_distribute_parallel_for)) ||
4828 (CancelRegion == OMPD_taskgroup &&
4829 (ParentRegion == OMPD_task ||
4831 (ParentRegion == OMPD_taskloop ||
4832 ParentRegion == OMPD_master_taskloop ||
4833 ParentRegion == OMPD_parallel_master_taskloop)))) ||
4834 (CancelRegion == OMPD_sections &&
4835 (ParentRegion == OMPD_section || ParentRegion == OMPD_sections ||
4836 ParentRegion == OMPD_parallel_sections)));
4837 OrphanSeen = ParentRegion == OMPD_unknown;
4838 }
else if (CurrentRegion == OMPD_master || CurrentRegion == OMPD_masked) {
4845 }
else if (CurrentRegion == OMPD_critical && CurrentName.
getName()) {
4851 bool DeadLock = Stack->hasDirective(
4855 if (K == OMPD_critical && DNI.
getName() == CurrentName.
getName()) {
4856 PreviousCriticalLoc = Loc;
4863 SemaRef.
Diag(StartLoc,
4864 diag::err_omp_prohibited_region_critical_same_name)
4866 if (PreviousCriticalLoc.
isValid())
4867 SemaRef.
Diag(PreviousCriticalLoc,
4868 diag::note_omp_previous_critical_region);
4871 }
else if (CurrentRegion == OMPD_barrier) {
4879 ParentRegion == OMPD_master || ParentRegion == OMPD_masked ||
4880 ParentRegion == OMPD_parallel_master ||
4881 ParentRegion == OMPD_critical || ParentRegion == OMPD_ordered;
4893 ParentRegion == OMPD_master || ParentRegion == OMPD_masked ||
4894 ParentRegion == OMPD_parallel_master ||
4895 ParentRegion == OMPD_critical || ParentRegion == OMPD_ordered;
4896 Recommend = ShouldBeInParallelRegion;
4897 }
else if (CurrentRegion == OMPD_ordered) {
4906 NestingProhibited = ParentRegion == OMPD_critical ||
4909 Stack->isParentOrderedRegion());
4910 Recommend = ShouldBeInOrderedRegion;
4916 (SemaRef.
LangOpts.OpenMP <= 45 && ParentRegion != OMPD_target) ||
4917 (SemaRef.
LangOpts.OpenMP >= 50 && ParentRegion != OMPD_unknown &&
4918 ParentRegion != OMPD_target);
4919 OrphanSeen = ParentRegion == OMPD_unknown;
4920 Recommend = ShouldBeInTargetRegion;
4921 }
else if (CurrentRegion == OMPD_scan) {
4927 (ParentRegion != OMPD_simd && ParentRegion != OMPD_for &&
4928 ParentRegion != OMPD_for_simd && ParentRegion != OMPD_parallel_for &&
4929 ParentRegion != OMPD_parallel_for_simd);
4930 OrphanSeen = ParentRegion == OMPD_unknown;
4931 Recommend = ShouldBeInLoopSimdRegion;
4933 if (!NestingProhibited &&
4936 (ParentRegion == OMPD_teams || ParentRegion == OMPD_target_teams)) {
4946 CurrentRegion != OMPD_loop;
4947 Recommend = ShouldBeInParallelRegion;
4949 if (!NestingProhibited && CurrentRegion == OMPD_loop) {
4954 NestingProhibited = BindKind == OMPC_BIND_teams &&
4955 ParentRegion != OMPD_teams &&
4956 ParentRegion != OMPD_target_teams;
4957 Recommend = ShouldBeInTeamsRegion;
4959 if (!NestingProhibited &&
4965 (ParentRegion != OMPD_teams && ParentRegion != OMPD_target_teams);
4966 Recommend = ShouldBeInTeamsRegion;
4968 if (!NestingProhibited &&
4975 NestingProhibited = Stack->hasDirective(
4979 OffendingRegion = K;
4985 CloseNesting =
false;
4987 if (NestingProhibited) {
4989 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
4990 << getOpenMPDirectiveName(CurrentRegion) << Recommend;
4992 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region)
4993 << CloseNesting << getOpenMPDirectiveName(OffendingRegion)
4994 << Recommend << getOpenMPDirectiveName(CurrentRegion);
5009 bool ErrorFound =
false;
5010 unsigned NamedModifiersNumber = 0;
5011 llvm::IndexedMap<const OMPIfClause *, Kind2Unsigned> FoundNameModifiers;
5012 FoundNameModifiers.resize(llvm::omp::Directive_enumSize + 1);
5015 if (
const auto *IC = dyn_cast_or_null<OMPIfClause>(C)) {
5019 if (FoundNameModifiers[CurNM]) {
5020 S.
Diag(C->getBeginLoc(), diag::err_omp_more_one_clause)
5021 << getOpenMPDirectiveName(
Kind) << getOpenMPClauseName(OMPC_if)
5022 << (CurNM != OMPD_unknown) << getOpenMPDirectiveName(CurNM);
5024 }
else if (CurNM != OMPD_unknown) {
5025 NameModifierLoc.push_back(IC->getNameModifierLoc());
5026 ++NamedModifiersNumber;
5028 FoundNameModifiers[CurNM] = IC;
5029 if (CurNM == OMPD_unknown)
5035 if (!llvm::is_contained(AllowedNameModifiers, CurNM)) {
5036 S.
Diag(IC->getNameModifierLoc(),
5037 diag::err_omp_wrong_if_directive_name_modifier)
5038 << getOpenMPDirectiveName(CurNM) << getOpenMPDirectiveName(
Kind);
5045 if (FoundNameModifiers[OMPD_unknown] && NamedModifiersNumber > 0) {
5046 if (NamedModifiersNumber == AllowedNameModifiers.size()) {
5047 S.
Diag(FoundNameModifiers[OMPD_unknown]->getBeginLoc(),
5048 diag::err_omp_no_more_if_clause);
5052 unsigned AllowedCnt = 0;
5053 unsigned TotalAllowedNum =
5054 AllowedNameModifiers.size() - NamedModifiersNumber;
5055 for (
unsigned Cnt = 0,
End = AllowedNameModifiers.size(); Cnt <
End;
5058 if (!FoundNameModifiers[NM]) {
5060 Values += getOpenMPDirectiveName(NM);
5062 if (AllowedCnt + 2 == TotalAllowedNum)
5064 else if (AllowedCnt + 1 != TotalAllowedNum)
5069 S.
Diag(FoundNameModifiers[OMPD_unknown]->getCondition()->getBeginLoc(),
5070 diag::err_omp_unnamed_if_clause)
5071 << (TotalAllowedNum > 1) << Values;
5074 S.
Diag(Loc, diag::note_omp_previous_named_if_clause);
5084 bool AllowArraySection) {
5087 return std::make_pair(
nullptr,
true);
5099 } IsArrayExpr = NoArrayExpr;
5100 if (AllowArraySection) {
5101 if (
auto *ASE = dyn_cast_or_null<ArraySubscriptExpr>(RefExpr)) {
5102 Expr *
Base = ASE->getBase()->IgnoreParenImpCasts();
5103 while (
auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base))
5104 Base = TempASE->getBase()->IgnoreParenImpCasts();
5106 IsArrayExpr = ArraySubscript;
5107 }
else if (
auto *OASE = dyn_cast_or_null<OMPArraySectionExpr>(RefExpr)) {
5108 Expr *
Base = OASE->getBase()->IgnoreParenImpCasts();
5109 while (
auto *TempOASE = dyn_cast<OMPArraySectionExpr>(
Base))
5110 Base = TempOASE->getBase()->IgnoreParenImpCasts();
5111 while (
auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base))
5112 Base = TempASE->getBase()->IgnoreParenImpCasts();
5114 IsArrayExpr = OMPArraySection;
5120 auto *DE = dyn_cast_or_null<DeclRefExpr>(RefExpr);
5121 auto *ME = dyn_cast_or_null<MemberExpr>(RefExpr);
5122 if ((!DE || !isa<VarDecl>(DE->getDecl())) &&
5124 !isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()) ||
5125 !isa<FieldDecl>(ME->getMemberDecl()))) {
5126 if (IsArrayExpr != NoArrayExpr) {
5127 S.
Diag(ELoc, diag::err_omp_expected_base_var_name)
5128 << IsArrayExpr << ERange;
5132 ? diag::err_omp_expected_var_name_member_expr_or_array_item
5133 : diag::err_omp_expected_var_name_member_expr)
5136 return std::make_pair(
nullptr,
false);
5138 return std::make_pair(
5145 class AllocatorChecker final :
public ConstStmtVisitor<AllocatorChecker, bool> {
5146 DSAStackTy *S =
nullptr;
5150 return S->isUsesAllocatorsDecl(E->
getDecl())
5152 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
5153 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait;
5155 bool VisitStmt(
const Stmt *S) {
5156 for (
const Stmt *Child : S->children()) {
5157 if (Child && Visit(Child))
5162 explicit AllocatorChecker(DSAStackTy *S) : S(S) {}
5169 "Expected non-dependent context.");
5170 auto AllocateRange =
5173 auto PrivateRange = llvm::make_filter_range(Clauses, [](
const OMPClause *C) {
5178 if (
Cl->getClauseKind() == OMPC_private) {
5179 auto *PC = cast<OMPPrivateClause>(
Cl);
5180 I = PC->private_copies().begin();
5181 It = PC->varlist_begin();
5182 Et = PC->varlist_end();
5183 }
else if (
Cl->getClauseKind() == OMPC_firstprivate) {
5184 auto *PC = cast<OMPFirstprivateClause>(
Cl);
5185 I = PC->private_copies().begin();
5186 It = PC->varlist_begin();
5187 Et = PC->varlist_end();
5188 }
else if (
Cl->getClauseKind() == OMPC_lastprivate) {
5189 auto *PC = cast<OMPLastprivateClause>(
Cl);
5190 I = PC->private_copies().begin();
5191 It = PC->varlist_begin();
5192 Et = PC->varlist_end();
5193 }
else if (
Cl->getClauseKind() == OMPC_linear) {
5194 auto *PC = cast<OMPLinearClause>(
Cl);
5195 I = PC->privates().begin();
5196 It = PC->varlist_begin();
5197 Et = PC->varlist_end();
5198 }
else if (
Cl->getClauseKind() == OMPC_reduction) {
5199 auto *PC = cast<OMPReductionClause>(
Cl);
5200 I = PC->privates().begin();
5201 It = PC->varlist_begin();
5202 Et = PC->varlist_end();
5203 }
else if (
Cl->getClauseKind() == OMPC_task_reduction) {
5204 auto *PC = cast<OMPTaskReductionClause>(
Cl);
5205 I = PC->privates().begin();
5206 It = PC->varlist_begin();
5207 Et = PC->varlist_end();
5208 }
else if (
Cl->getClauseKind() == OMPC_in_reduction) {
5209 auto *PC = cast<OMPInReductionClause>(
Cl);
5210 I = PC->privates().begin();
5211 It = PC->varlist_begin();
5212 Et = PC->varlist_end();
5214 llvm_unreachable(
"Expected private clause.");
5216 for (
Expr *E : llvm::make_range(It, Et)) {
5223 Expr *SimpleRefExpr = E;
5226 DeclToCopy.try_emplace(Res.first,
5227 cast<VarDecl>(cast<DeclRefExpr>(*I)->getDecl()));
5232 auto *AC = cast<OMPAllocateClause>(C);
5236 AC->getAllocator()) {
5237 Expr *Allocator = AC->getAllocator();
5243 AllocatorChecker Checker(Stack);
5244 if (Checker.Visit(Allocator))
5245 S.
Diag(Allocator->getExprLoc(),
5246 diag::err_omp_allocator_not_in_uses_allocators)
5247 << Allocator->getSourceRange();
5249 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
5255 if (AllocatorKind == OMPAllocateDeclAttr::OMPThreadMemAlloc &&
5258 S.
Diag(AC->getAllocator()->getExprLoc(),
5259 diag::warn_omp_allocate_thread_on_task_target_directive)
5260 << getOpenMPDirectiveName(Stack->getCurrentDirective());
5262 for (
Expr *E : AC->varlists()) {
5265 Expr *SimpleRefExpr = E;
5268 DSAStackTy::DSAVarData Data = Stack->getTopDSA(VD,
false);
5271 diag::err_omp_expected_private_copy_for_allocate);
5274 VarDecl *PrivateVD = DeclToCopy[VD];
5276 AllocatorKind, AC->getAllocator()))
5279 Expr *Alignment =
nullptr;
5296 CaptureVars(
Sema &Actions) : BaseTransform(Actions) {}
5298 bool AlwaysRebuild() {
return true; }
5311 BodyStmts.push_back(NewDeclStmt);
5349 DistParam, LogicalTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5360 auto BuildVarRef = [&](
VarDecl *VD) {
5373 Actions.
BuildBinOp(
nullptr, {}, BO_LT, BuildVarRef(NewStep),
Zero));
5377 nullptr, {}, BO_Sub, BuildVarRef(NewStop), BuildVarRef(NewStart)));
5381 nullptr, {}, BO_Div, ForwardRange, BuildVarRef(NewStep)));
5385 nullptr, {}, BO_Sub, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5389 Actions.
BuildUnaryOp(
nullptr, {}, UO_Minus, BuildVarRef(NewStep)));
5391 Actions.
BuildBinOp(
nullptr, {}, BO_Div, BackwardRange, NegIncAmount));
5395 {}, {}, IsNegStep, BackwardDist, ForwardDist));
5397 assert((Rel == BO_LT || Rel == BO_LE || Rel == BO_GE || Rel == BO_GT) &&
5398 "Expected one of these relational operators");
5405 nullptr, {}, Rel, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5409 if (Rel == BO_GE || Rel == BO_GT)
5411 nullptr, {}, BO_Sub, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5414 nullptr, {}, BO_Sub, BuildVarRef(NewStop), BuildVarRef(NewStart)));
5420 if (Rel == BO_LE || Rel == BO_GE) {
5431 Expr *Divisor = BuildVarRef(NewStep);
5432 if (Rel == BO_GE || Rel == BO_GT)
5435 Expr *DivisorMinusOne =
5438 Actions.
BuildBinOp(
nullptr, {}, BO_Add, Range, DivisorMinusOne));
5440 Actions.
BuildBinOp(
nullptr, {}, BO_Div, RangeRoundUp, Divisor));
5450 Actions.
getCurScope(), {}, BO_Assign, DistRef, Dist));
5451 BodyStmts.push_back(ResultAssign);
5456 return cast<CapturedStmt>(
5483 {
"Logical", LogicalTy},
5494 assert(!Invalid &&
"Expecting capture-by-value to work.");
5499 auto *CS = cast<CapturedDecl>(Actions.
CurContext);
5503 TargetParam, LoopVarTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5506 IndvarParam, LogicalTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5509 CaptureVars Recap(Actions);
5514 Actions.
BuildBinOp(
nullptr, {}, BO_Mul, NewStep, LogicalRef));
5529 BO_Assign, TargetRef, Advanced));
5531 return cast<CapturedStmt>(
5542 if (
auto *For = dyn_cast<ForStmt>(AStmt)) {
5543 Stmt *Init = For->getInit();
5544 if (
auto *LCVarDeclStmt = dyn_cast<DeclStmt>(Init)) {
5546 LIVDecl = cast<VarDecl>(LCVarDeclStmt->getSingleDecl());
5547 }
else if (
auto *LCAssign = dyn_cast<BinaryOperator>(Init)) {
5549 assert(LCAssign->getOpcode() == BO_Assign &&
5550 "init part must be a loop variable assignment");
5551 auto *CounterRef = cast<DeclRefExpr>(LCAssign->getLHS());
5552 LIVDecl = cast<VarDecl>(CounterRef->getDecl());
5554 llvm_unreachable(
"Cannot determine loop variable");
5557 Cond = For->getCond();
5558 Inc = For->getInc();
5559 }
else if (
auto *RangeFor = dyn_cast<CXXForRangeStmt>(AStmt)) {
5560 DeclStmt *BeginStmt = RangeFor->getBeginStmt();
5562 LUVDecl = RangeFor->getLoopVariable();
5564 Cond = RangeFor->getCond();
5565 Inc = RangeFor->getInc();
5567 llvm_unreachable(
"unhandled kind of loop");
5576 if (
auto *CondBinExpr = dyn_cast<BinaryOperator>(Cond)) {
5577 LHS = CondBinExpr->getLHS();
5578 RHS = CondBinExpr->getRHS();
5579 CondRel = CondBinExpr->getOpcode();
5580 }
else if (
auto *CondCXXOp = dyn_cast<CXXOperatorCallExpr>(Cond)) {
5581 assert(CondCXXOp->getNumArgs() == 2 &&
"Comparison should have 2 operands");
5582 LHS = CondCXXOp->getArg(0);
5583 RHS = CondCXXOp->getArg(1);
5584 switch (CondCXXOp->getOperator()) {
5585 case OO_ExclaimEqual:
5597 case OO_GreaterEqual:
5601 llvm_unreachable(
"unexpected iterator operator");
5604 llvm_unreachable(
"unexpected loop condition");
5608 cast<DeclRefExpr>(LHS->
IgnoreImplicit())->getDecl() != LIVDecl) {
5609 std::swap(LHS, RHS);
5626 if (
auto *IncUn = dyn_cast<UnaryOperator>(Inc)) {
5628 switch (IncUn->getOpcode()) {
5638 llvm_unreachable(
"unhandled unary increment operator");
5642 }
else if (
auto *IncBin = dyn_cast<BinaryOperator>(Inc)) {
5643 if (IncBin->getOpcode() == BO_AddAssign) {
5644 Step = IncBin->getRHS();
5645 }
else if (IncBin->getOpcode() == BO_SubAssign) {
5649 llvm_unreachable(
"unhandled binary increment operator");
5650 }
else if (
auto *CondCXXOp = dyn_cast<CXXOperatorCallExpr>(Inc)) {
5651 switch (CondCXXOp->getOperator()) {
5661 Step = CondCXXOp->getArg(1);
5665 BuildUnaryOp(
nullptr, {}, UO_Minus, CondCXXOp->getArg(1)));
5668 llvm_unreachable(
"unhandled overloaded increment operator");
5671 llvm_unreachable(
"unknown increment expression");
5676 *
this, LVTy, LogicalTy, CounterRef, Step, isa<CXXForRangeStmt>(AStmt));
5678 {},
nullptr,
nullptr, {},
nullptr);
5680 LoopVarFunc, LVRef);
5685 if (isa<ForStmt>(AStmt) || isa<CXXForRangeStmt>(AStmt))
5692 "Loop transformation directive expected");
5693 return LoopTransform;
5700 Expr *UnresolvedMapper);
5712 for (
int Cnt = 0, EndCnt = Clauses.size(); Cnt < EndCnt; ++Cnt) {
5713 auto *C = dyn_cast<OMPMapClause>(Clauses[Cnt]);
5717 auto *MI = C->mapperlist_begin();
5718 for (
auto I = C->varlist_begin(),
End = C->varlist_end(); I !=
End;
5738 ElemType = ATy->getElementType();
5741 CanonType = ElemType;
5746 1, {CanonType,
nullptr});
5747 llvm::DenseMap<const Type *, Expr *> Visited;
5750 while (!Types.empty()) {
5753 std::tie(BaseType, CurFD) = Types.pop_back_val();
5754 while (ParentChain.back().second == 0)
5755 ParentChain.pop_back();
5756 --ParentChain.back().second;
5763 auto It = Visited.find(BaseType.
getTypePtr());
5764 if (It == Visited.end()) {
5772 S, Stack->getCurScope(), MapperIdScopeSpec, DefaultMapperId,
5776 It = Visited.try_emplace(BaseType.
getTypePtr(), ER.
get()).first;
5783 Expr *BaseExpr = OE;
5784 for (
const auto &
P : ParentChain) {
5802 SubExprs.push_back(BaseExpr);
5806 bool FirstIter =
true;
5816 ParentChain.emplace_back(CurFD, 1);
5818 ++ParentChain.back().second;
5820 Types.emplace_back(FieldTy, FD);
5824 if (SubExprs.empty())
5829 C->getMapTypeModifiers(), C->getMapTypeModifiersLoc(),
5830 MapperIdScopeSpec, MapperId, C->getMapType(),
5833 Clauses.push_back(NewClause);
5844 OMPExecutableDirective::getSingleClause<OMPBindClause>(Clauses))
5845 BindKind = BC->getBindKind();
5849 BindKind, StartLoc))
5854 bool ErrorFound =
false;
5855 ClausesWithImplicit.append(Clauses.begin(), Clauses.end());
5857 Kind != OMPD_critical &&
Kind != OMPD_section &&
Kind != OMPD_master &&
5859 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
5862 DSAAttrChecker DSAChecker(
DSAStack, *
this, cast<CapturedStmt>(AStmt));
5865 while (--ThisCaptureLevel >= 0)
5866 S = cast<CapturedStmt>(S)->getCapturedStmt();
5867 DSAChecker.Visit(S);
5871 auto *CS = cast<CapturedStmt>(AStmt);
5875 if (CaptureRegions.size() > 1 && CaptureRegions.front() == OMPD_task)
5877 DSAChecker.visitSubCaptures(CS);
5879 if (DSAChecker.isErrorFound())
5882 VarsWithInheritedDSA = DSAChecker.getVarsWithInheritedDSA();
5885 DSAChecker.getImplicitFirstprivate().begin(),
5886 DSAChecker.getImplicitFirstprivate().end());
5887 const unsigned DefaultmapKindNum = OMPC_DEFAULTMAP_pointer + 1;
5890 ImplicitMapModifiers[DefaultmapKindNum];
5892 ImplicitMapModifiersLoc[DefaultmapKindNum];
5896 if (
auto *DMC = dyn_cast<OMPDefaultmapClause>(C))
5897 if (DMC->getDefaultmapModifier() == OMPC_DEFAULTMAP_MODIFIER_present)
5898 PresentModifierLocs[DMC->getDefaultmapKind()] =
5899 DMC->getDefaultmapModifierLoc();
5901 for (
unsigned VC = 0; VC < DefaultmapKindNum; ++VC) {
5903 for (
unsigned I = 0; I < OMPC_MAP_delete; ++I) {
5906 ImplicitMaps[VC][I].append(ImplicitMap.begin(), ImplicitMap.end());
5909 DSAChecker.getImplicitMapModifier(
Kind);
5910 ImplicitMapModifiers[VC].append(ImplicitModifier.begin(),
5911 ImplicitModifier.end());
5912 std::fill_n(std::back_inserter(ImplicitMapModifiersLoc[VC]),
5913 ImplicitModifier.size(), PresentModifierLocs[VC]);
5917 if (
auto *IRC = dyn_cast<OMPInReductionClause>(C)) {
5918 for (
Expr *E : IRC->taskgroup_descriptors())
5920 ImplicitFirstprivates.emplace_back(E);
5925 if (
auto *DC = dyn_cast<OMPDetachClause>(C))
5926 ImplicitFirstprivates.push_back(DC->getEventHandler());
5928 if (!ImplicitFirstprivates.empty()) {
5932 ClausesWithImplicit.push_back(Implicit);
5933 ErrorFound = cast<OMPFirstprivateClause>(Implicit)->varlist_size() !=
5934 ImplicitFirstprivates.size();
5947 if (
auto *RC = dyn_cast<OMPReductionClause>(C))
5948 for (
Expr *E : RC->varlists())
5950 ImplicitExprs.emplace_back(E);
5952 if (!ImplicitExprs.empty()) {
5958 MapperId, OMPC_MAP_tofrom,
5961 ClausesWithImplicit.emplace_back(Implicit);
5964 for (
unsigned I = 0, E = DefaultmapKindNum; I < E; ++I) {
5965 int ClauseKindCnt = -1;
5968 if (ImplicitMap.empty())
5974 ImplicitMapModifiers[I], ImplicitMapModifiersLoc[I],
5975 MapperIdScopeSpec, MapperId,
Kind,
true,
5978 ClausesWithImplicit.emplace_back(Implicit);
5979 ErrorFound |= cast<OMPMapClause>(Implicit)->varlist_size() !=
5990 ClausesWithImplicit);
5998 AllowedNameModifiers.push_back(OMPD_parallel);
6002 VarsWithInheritedDSA);
6004 AllowedNameModifiers.push_back(OMPD_simd);
6016 VarsWithInheritedDSA);
6020 EndLoc, VarsWithInheritedDSA);
6022 AllowedNameModifiers.push_back(OMPD_simd);
6029 assert(ClausesWithImplicit.empty() &&
6030 "No clauses are allowed for 'omp section' directive");
6038 assert(ClausesWithImplicit.empty() &&
6039 "No clauses are allowed for 'omp master' directive");
6050 case OMPD_parallel_for:
6052 EndLoc, VarsWithInheritedDSA);
6053 AllowedNameModifiers.push_back(OMPD_parallel);
6055 case OMPD_parallel_for_simd:
6057 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6058 AllowedNameModifiers.push_back(OMPD_parallel);
6060 AllowedNameModifiers.push_back(OMPD_simd);
6062 case OMPD_parallel_master:
6065 AllowedNameModifiers.push_back(OMPD_parallel);
6067 case OMPD_parallel_sections:
6070 AllowedNameModifiers.push_back(OMPD_parallel);
6075 AllowedNameModifiers.push_back(OMPD_task);
6077 case OMPD_taskyield:
6078 assert(ClausesWithImplicit.empty() &&
6079 "No clauses are allowed for 'omp taskyield' directive");
6080 assert(AStmt ==
nullptr &&
6081 "No associated statement allowed for 'omp taskyield' directive");
6085 assert(ClausesWithImplicit.empty() &&
6086 "No clauses are allowed for 'omp barrier' directive");
6087 assert(AStmt ==
nullptr &&
6088 "No associated statement allowed for 'omp barrier' directive");
6092 assert(AStmt ==
nullptr &&
6093 "No associated statement allowed for 'omp taskwait' directive");
6096 case OMPD_taskgroup:
6101 assert(AStmt ==
nullptr &&
6102 "No associated statement allowed for 'omp flush' directive");
6106 assert(AStmt ==
nullptr &&
6107 "No associated statement allowed for 'omp depobj' directive");
6111 assert(AStmt ==
nullptr &&
6112 "No associated statement allowed for 'omp scan' directive");
6130 AllowedNameModifiers.push_back(OMPD_target);
6132 case OMPD_target_parallel:
6135 AllowedNameModifiers.push_back(OMPD_target);
6136 AllowedNameModifiers.push_back(OMPD_parallel);
6138 case OMPD_target_parallel_for:
6140 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6141 AllowedNameModifiers.push_back(OMPD_target);
6142 AllowedNameModifiers.push_back(OMPD_parallel);
6144 case OMPD_cancellation_point:
6145 assert(ClausesWithImplicit.empty() &&
6146 "No clauses are allowed for 'omp cancellation point' directive");
6147 assert(AStmt ==
nullptr &&
"No associated statement allowed for 'omp "
6148 "cancellation point' directive");
6152 assert(AStmt ==
nullptr &&
6153 "No associated statement allowed for 'omp cancel' directive");
6156 AllowedNameModifiers.push_back(OMPD_cancel);
6158 case OMPD_target_data:
6161 AllowedNameModifiers.push_back(OMPD_target_data);
6163 case OMPD_target_enter_data:
6166 AllowedNameModifiers.push_back(OMPD_target_enter_data);
6168 case OMPD_target_exit_data:
6171 AllowedNameModifiers.push_back(OMPD_target_exit_data);
6175 EndLoc, VarsWithInheritedDSA);
6176 AllowedNameModifiers.push_back(OMPD_taskloop);
6178 case OMPD_taskloop_simd:
6180 EndLoc, VarsWithInheritedDSA);
6181 AllowedNameModifiers.push_back(OMPD_taskloop);
6183 AllowedNameModifiers.push_back(OMPD_simd);
6185 case OMPD_master_taskloop:
6187 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6188 AllowedNameModifiers.push_back(OMPD_taskloop);
6190 case OMPD_master_taskloop_simd:
6192 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6193 AllowedNameModifiers.push_back(OMPD_taskloop);
6195 AllowedNameModifiers.push_back(OMPD_simd);
6197 case OMPD_parallel_master_taskloop:
6199 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6200 AllowedNameModifiers.push_back(OMPD_taskloop);
6201 AllowedNameModifiers.push_back(OMPD_parallel);
6203 case OMPD_parallel_master_taskloop_simd:
6205 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6206 AllowedNameModifiers.push_back(OMPD_taskloop);
6207 AllowedNameModifiers.push_back(OMPD_parallel);
6209 AllowedNameModifiers.push_back(OMPD_simd);
6211 case OMPD_distribute:
6213 EndLoc, VarsWithInheritedDSA);
6215 case OMPD_target_update:
6218 AllowedNameModifiers.push_back(OMPD_target_update);
6220 case OMPD_distribute_parallel_for:
6222 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6223 AllowedNameModifiers.push_back(OMPD_parallel);
6225 case OMPD_distribute_parallel_for_simd:
6227 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6228 AllowedNameModifiers.push_back(OMPD_parallel);
6230 AllowedNameModifiers.push_back(OMPD_simd);
6232 case OMPD_distribute_simd:
6234 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6236 AllowedNameModifiers.push_back(OMPD_simd);
6238 case OMPD_target_parallel_for_simd:
6240 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6241 AllowedNameModifiers.push_back(OMPD_target);
6242 AllowedNameModifiers.push_back(OMPD_parallel);
6244 AllowedNameModifiers.push_back(OMPD_simd);
6246 case OMPD_target_simd:
6248 EndLoc, VarsWithInheritedDSA);
6249 AllowedNameModifiers.push_back(OMPD_target);
6251 AllowedNameModifiers.push_back(OMPD_simd);
6253 case OMPD_teams_distribute:
6255 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6257 case OMPD_teams_distribute_simd:
6259 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6261 AllowedNameModifiers.push_back(OMPD_simd);
6263 case OMPD_teams_distribute_parallel_for_simd:
6265 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6266 AllowedNameModifiers.push_back(OMPD_parallel);
6268 AllowedNameModifiers.push_back(OMPD_simd);
6270 case OMPD_teams_distribute_parallel_for:
6272 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6273 AllowedNameModifiers.push_back(OMPD_parallel);
6275 case OMPD_target_teams:
6278 AllowedNameModifiers.push_back(OMPD_target);
6280 case OMPD_target_teams_distribute:
6282 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6283 AllowedNameModifiers.push_back(OMPD_target);
6285 case OMPD_target_teams_distribute_parallel_for:
6287 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6288 AllowedNameModifiers.push_back(OMPD_target);
6289 AllowedNameModifiers.push_back(OMPD_parallel);
6291 case OMPD_target_teams_distribute_parallel_for_simd:
6293 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6294 AllowedNameModifiers.push_back(OMPD_target);
6295 AllowedNameModifiers.push_back(OMPD_parallel);
6297 AllowedNameModifiers.push_back(OMPD_simd);
6299 case OMPD_target_teams_distribute_simd:
6301 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6302 AllowedNameModifiers.push_back(OMPD_target);
6304 AllowedNameModifiers.push_back(OMPD_simd);
6307 assert(AStmt ==
nullptr &&
6308 "No associated statement allowed for 'omp interop' directive");
6317 EndLoc, VarsWithInheritedDSA);
6319 case OMPD_teams_loop:
6321 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6323 case OMPD_target_teams_loop:
6325 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6327 case OMPD_parallel_loop:
6329 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6331 case OMPD_target_parallel_loop:
6333 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6335 case OMPD_declare_target:
6336 case OMPD_end_declare_target:
6337 case OMPD_threadprivate:
6339 case OMPD_declare_reduction:
6340 case OMPD_declare_mapper:
6341 case OMPD_declare_simd:
6343 case OMPD_declare_variant:
6344 case OMPD_begin_declare_variant:
6345 case OMPD_end_declare_variant:
6346 llvm_unreachable(
"OpenMP Directive is not allowed");
6349 llvm_unreachable(
"Unknown OpenMP directive");
6352 ErrorFound = Res.
isInvalid() || ErrorFound;
6356 if (
DSAStack->getDefaultDSA() == DSA_none ||
6357 DSAStack->getDefaultDSA() == DSA_firstprivate) {
6358 DSAAttrChecker DSAChecker(
DSAStack, *
this,
nullptr);
6360 switch (C->getClauseKind()) {
6361 case OMPC_num_threads:
6362 case OMPC_dist_schedule:
6369 cast<OMPIfClause>(C)->getNameModifier() != OMPD_target)
6373 cast<OMPIfClause>(C)->getNameModifier() != OMPD_parallel)
6379 case OMPC_grainsize:
6380 case OMPC_num_tasks:
6383 case OMPC_novariants:
6384 case OMPC_nocontext:
6391 case OMPC_num_teams:
6392 case OMPC_thread_limit:
6399 case OMPC_proc_bind:
6401 case OMPC_firstprivate:
6402 case OMPC_lastprivate:
6404 case OMPC_reduction:
6405 case OMPC_task_reduction:
6406 case OMPC_in_reduction:
6410 case OMPC_copyprivate:
6413 case OMPC_mergeable:
6430 case OMPC_defaultmap:
6433 case OMPC_use_device_ptr:
6434 case OMPC_use_device_addr:
6435 case OMPC_is_device_ptr:
6436 case OMPC_nontemporal:
6439 case OMPC_inclusive:
6440 case OMPC_exclusive:
6441 case OMPC_uses_allocators:
6445 case OMPC_allocator:
6448 case OMPC_threadprivate:
6451 case OMPC_unified_address:
6452 case OMPC_unified_shared_memory:
6453 case OMPC_reverse_offload:
6454 case OMPC_dynamic_allocators:
6455 case OMPC_atomic_default_mem_order:
6456 case OMPC_device_type:
6460 llvm_unreachable(
"Unexpected clause");
6462 for (
Stmt *CC : C->children()) {
6464 DSAChecker.Visit(CC);
6467 for (
const auto &
P : DSAChecker.getVarsWithInheritedDSA())
6468 VarsWithInheritedDSA[
P.getFirst()] =
P.getSecond();
6470 for (
const auto &
P : VarsWithInheritedDSA) {
6471 if (
P.getFirst()->isImplicit() || isa<OMPCapturedExprDecl>(
P.getFirst()))
6474 if (
DSAStack->getDefaultDSA() == DSA_none ||
6475 DSAStack->getDefaultDSA() == DSA_firstprivate) {
6476 Diag(
P.second->getExprLoc(), diag::err_omp_no_dsa_for_variable)
6477 <<
P.first <<
P.second->getSourceRange();
6478 Diag(
DSAStack->getDefaultDSALocation(), diag::note_omp_default_dsa_none);
6480 Diag(
P.second->getExprLoc(),
6481 diag::err_omp_defaultmap_no_attr_for_variable)
6482 <<
P.first <<
P.second->getSourceRange();
6484 diag::note_omp_defaultmap_attr_none);
6488 if (!AllowedNameModifiers.empty())
6502 DSAStack->addTargetDirLocation(StartLoc);
6513 assert(Aligneds.size() == Alignments.size());
6514 assert(Linears.size() == LinModifiers.size());
6515 assert(Linears.size() == Steps.size());
6516 if (!DG || DG.
get().isNull())
6519 const int SimdId = 0;
6520 if (!DG.
get().isSingleDecl()) {
6521 Diag(SR.
getBegin(), diag::err_omp_single_decl_in_declare_simd_variant)
6525 Decl *ADecl = DG.
get().getSingleDecl();
6526 if (
auto *FTD = dyn_cast<FunctionTemplateDecl>(ADecl))
6527 ADecl = FTD->getTemplatedDecl();
6529 auto *FD = dyn_cast<FunctionDecl>(ADecl);
6531 Diag(ADecl->
getLocation(), diag::err_omp_function_expected) << SimdId;
6540 SL = VerifyPositiveIntegerConstantInClause(Simdlen, OMPC_simdlen);
6547 llvm::DenseMap<const Decl *, const Expr *> UniformedArgs;
6548 const Expr *UniformedLinearThis =
nullptr;
6549 for (
const Expr *E : Uniforms) {
6551 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6552 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl()))
6553 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6554 FD->getParamDecl(PVD->getFunctionScopeIndex())
6555 ->getCanonicalDecl() == PVD->getCanonicalDecl()) {
6556 UniformedArgs.try_emplace(PVD->getCanonicalDecl(), E);
6559 if (isa<CXXThisExpr>(E)) {
6560 UniformedLinearThis = E;
6564 << FD->getDeclName() << (isa<CXXMethodDecl>(ADecl) ? 1 : 0);
6574 llvm::DenseMap<const Decl *, const Expr *> AlignedArgs;
6575 const Expr *AlignedThis =
nullptr;
6576 for (
const Expr *E : Aligneds) {
6578 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6579 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
6581 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6582 FD->getParamDecl(PVD->getFunctionScopeIndex())
6583 ->getCanonicalDecl() == CanonPVD) {
6586 if (AlignedArgs.count(CanonPVD) > 0) {
6588 << 1 << getOpenMPClauseName(OMPC_aligned)
6590 Diag(AlignedArgs[CanonPVD]->getExprLoc(),
6591 diag::note_omp_explicit_dsa)
6592 << getOpenMPClauseName(OMPC_aligned);
6595 AlignedArgs[CanonPVD] = E;
6597 .getNonReferenceType()
6598 .getUnqualifiedType()
6599 .getCanonicalType();
6602 Diag(E->
getExprLoc(), diag::err_omp_aligned_expected_array_or_ptr)
6604 Diag(PVD->getLocation(), diag::note_previous_decl) << PVD;
6609 if (isa<CXXThisExpr>(E)) {
6612 << 2 << getOpenMPClauseName(OMPC_aligned) << E->
getSourceRange();
6614 << getOpenMPClauseName(OMPC_aligned);
6620 << FD->getDeclName() << (isa<CXXMethodDecl>(ADecl) ? 1 : 0);
6627 for (
Expr *E : Alignments) {
6630 Align = VerifyPositiveIntegerConstantInClause(E, OMPC_aligned);
6631 NewAligns.push_back(Align.
get());
6642 llvm::DenseMap<const Decl *, const Expr *> LinearArgs;
6643 const bool IsUniformedThis = UniformedLinearThis !=
nullptr;
6644 auto MI = LinModifiers.begin();
6645 for (
const Expr *E : Linears) {
6649 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6650 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
6652 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6653 FD->getParamDecl(PVD->getFunctionScopeIndex())
6654 ->getCanonicalDecl() == CanonPVD) {
6657 if (LinearArgs.count(CanonPVD) > 0) {
6659 << getOpenMPClauseName(OMPC_linear)
6661 Diag(LinearArgs[CanonPVD]->getExprLoc(),
6662 diag::note_omp_explicit_dsa)
6663 << getOpenMPClauseName(OMPC_linear);
6667 if (UniformedArgs.count(CanonPVD) > 0) {
6669 << getOpenMPClauseName(OMPC_linear)
6671 Diag(UniformedArgs[CanonPVD]->getExprLoc(),
6672 diag::note_omp_explicit_dsa)
6673 << getOpenMPClauseName(OMPC_uniform);
6676 LinearArgs[CanonPVD] = E;
6682 PVD->getOriginalType(),
6687 if (isa<CXXThisExpr>(E)) {
6688 if (UniformedLinearThis) {
6690 << getOpenMPClauseName(OMPC_linear)
6691 << getOpenMPClauseName(IsUniformedThis ? OMPC_uniform : OMPC_linear)
6693 Diag(UniformedLinearThis->
getExprLoc(), diag::note_omp_explicit_dsa)
6694 << getOpenMPClauseName(IsUniformedThis ? OMPC_uniform
6698 UniformedLinearThis = E;
6707 << FD->getDeclName() << (isa<CXXMethodDecl>(ADecl) ? 1 : 0);
6709 Expr *Step =
nullptr;
6710 Expr *NewStep =
nullptr;
6712 for (
Expr *E : Steps) {
6714 if (Step == E || !E) {
6715 NewSteps.push_back(E ? NewStep :
nullptr);
6719 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Step))
6720 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
6722 if (UniformedArgs.count(CanonPVD) == 0) {
6729 NewSteps.push_back(Step);
6746 NewSteps.push_back(NewStep);
6748 auto *NewAttr = OMPDeclareSimdDeclAttr::CreateImplicit(
6750 Uniforms.size(),
const_cast<Expr **
>(Aligneds.data()), Aligneds.size(),
6751 const_cast<Expr **
>(NewAligns.data()), NewAligns.size(),
6752 const_cast<Expr **
>(Linears.data()), Linears.size(),
6753 const_cast<unsigned *
>(LinModifiers.data()), LinModifiers.size(),
6754 NewSteps.data(), NewSteps.size(), SR);
6762 "Expected function type with prototype.");
6764 "Expected function with type with no prototype.");
6766 "Expected function with prototype.");
6774 Param->setScopeInfo(0, Params.size());
6775 Param->setImplicit();
6776 Params.push_back(Param);
6779 FD->setParams(Params);
6786 if (
auto *UTemplDecl = dyn_cast<FunctionTemplateDecl>(D))
6787 FD = UTemplDecl->getTemplatedDecl();
6789 FD = cast<FunctionDecl>(D);
6790 assert(FD &&
"Expected a function declaration!");
6796 for (AssumptionAttr *AA : OMPAssumeScoped)
6799 for (AssumptionAttr *AA : OMPAssumeGlobal)
/// RAII-style scope record for an `omp begin declare variant` region.
/// Stores a pointer to the region's trait info and caches the mangled-name
/// suffix derived from it; the suffix is later appended to the mangled name
/// of each function declared inside the region (see the `MangledName +=
/// DVScope.NameSuffix` use when starting a function definition in this
/// scope), and the trait info is queried for active implementation
/// extensions (e.g. allow_templates / disable_implicit_base).
Sema::OMPDeclareVariantScope::OMPDeclareVariantScope(OMPTraitInfo &TI)
    : TI(&TI), NameSuffix(TI.getMangledName()) {}
6812 OMPDeclareVariantScope &DVScope = OMPDeclareVariantScopes.back();
6815 bool IsTemplated = !TemplateParamLists.empty();
6817 !DVScope.TI->isExtensionActive(
6818 llvm::omp::TraitProperty::implementation_extension_allow_templates))
6834 for (
auto *Candidate : Lookup) {
6835 auto *CandidateDecl = Candidate->getUnderlyingDecl();
6837 if (IsTemplated && isa<FunctionTemplateDecl>(CandidateDecl)) {
6838 auto *FTD = cast<FunctionTemplateDecl>(CandidateDecl);
6839 if (FTD->getTemplateParameters()->size() == TemplateParamLists.size())
6840 UDecl = FTD->getTemplatedDecl();
6841 }
else if (!IsTemplated)
6842 UDecl = dyn_cast<FunctionDecl>(CandidateDecl);
6856 FType, UDeclTy,
false,
6863 Bases.push_back(UDecl);
6866 bool UseImplicitBase = !DVScope.TI->isExtensionActive(
6867 llvm::omp::TraitProperty::implementation_extension_disable_implicit_base);
6869 if (Bases.empty() && UseImplicitBase) {
6873 if (
auto *BaseTemplD = dyn_cast<FunctionTemplateDecl>(BaseD))
6874 Bases.push_back(BaseTemplD->getTemplatedDecl());
6876 Bases.push_back(cast<FunctionDecl>(BaseD));
6882 MangledName += DVScope.NameSuffix;
6897 if (
auto *UTemplDecl = dyn_cast<FunctionTemplateDecl>(D))
6898 FD = UTemplDecl->getTemplatedDecl();
6900 FD = cast<FunctionDecl>(D);
6907 OMPDeclareVariantScope &DVScope = OMPDeclareVariantScopes.back();
6908 auto *OMPDeclareVariantA = OMPDeclareVariantAttr::CreateImplicit(
6909 Context, VariantFuncRef, DVScope.TI,
6914 BaseFD->addAttr(OMPDeclareVariantA);
6923 CallExpr *CE = dyn_cast<CallExpr>(Call.get());
6931 if (!CalleeFnDecl->
hasAttr<OMPDeclareVariantAttr>())
6935 std::function<void(StringRef)> DiagUnknownTrait = [
this,
6936 CE](StringRef ISATrait) {
6949 while (CalleeFnDecl) {
6950 for (OMPDeclareVariantAttr *A :
6952 Expr *VariantRef = A->getVariantFuncRef();
6954 VariantMatchInfo VMI;
6957 if (!isVariantApplicableInContext(VMI, OMPCtx,
6961 VMIs.push_back(VMI);
6962 Exprs.push_back(VariantRef);
6970 int BestIdx = getBestVariantMatchForContext(VMIs, OMPCtx);
6973 Expr *BestExpr = cast<DeclRefExpr>(Exprs[BestIdx]);
6974 Decl *BestDecl = cast<DeclRefExpr>(BestExpr)->getDecl();
6990 if (
auto *SpecializedMethod = dyn_cast<CXXMethodDecl>(BestDecl)) {
6991 auto *MemberCall = dyn_cast<CXXMemberCallExpr>(CE);
6993 Context, MemberCall->getImplicitObjectArgument(),
6995 MemberCall->getValueKind(), MemberCall->getObjectKind());
7000 if (
CallExpr *NCE = dyn_cast<CallExpr>(NewCall.
get())) {
7001 FunctionDecl *NewCalleeFnDecl = NCE->getDirectCallee();
7003 CalleeFnType, NewCalleeFnDecl->
getType(),
7014 VMIs.erase(VMIs.begin() + BestIdx);
7015 Exprs.erase(Exprs.begin() + BestIdx);
7016 }
while (!VMIs.empty());
7026 unsigned NumAppendArgs,
7028 if (!DG || DG.
get().isNull())
7031 const int VariantId = 1;
7033 if (!DG.
get().isSingleDecl()) {
7034 Diag(SR.
getBegin(), diag::err_omp_single_decl_in_declare_simd_variant)
7038 Decl *ADecl = DG.
get().getSingleDecl();
7039 if (
auto *FTD = dyn_cast<FunctionTemplateDecl>(ADecl))
7040 ADecl = FTD->getTemplatedDecl();
7043 auto *FD = dyn_cast<FunctionDecl>(ADecl);
7050 auto &&HasMultiVersionAttributes = [](
const FunctionDecl *FD) {
7053 return FD->isMultiVersion() || FD->hasAttr<TargetAttr>();
7056 if (HasMultiVersionAttributes(FD)) {
7057 Diag(FD->getLocation(), diag::err_omp_declare_variant_incompat_attributes)
7063 if (FD->isUsed(
false))
7064 Diag(SR.
getBegin(), diag::warn_omp_declare_variant_after_used)
7065 << FD->getLocation();
7069 if (!FD->isThisDeclarationADefinition() && FD->isDefined(Definition) &&
7071 Diag(SR.
getBegin(), diag::warn_omp_declare_variant_after_emitted)
7072 << FD->getLocation();
7076 Diag(SR.
getBegin(), diag::err_omp_function_expected) << VariantId;
7080 auto ShouldDelayChecks = [](
Expr *&E,
bool) {
7086 if (FD->isDependentContext() || ShouldDelayChecks(VariantRef,
false) ||
7088 return std::make_pair(FD, VariantRef);
7091 auto HandleNonConstantScoresAndConditions = [
this](
Expr *&E,
7092 bool IsScore) ->
bool {
7098 Diag(E->
getExprLoc(), diag::warn_omp_declare_variant_score_not_constant)
7106 diag::err_omp_declare_variant_user_condition_not_constant)
7114 QualType AdjustedFnType = FD->getType();
7115 if (NumAppendArgs) {
7118 Diag(FD->getLocation(), diag::err_omp_declare_variant_prototype_required)
7129 TD = dyn_cast_or_null<TypeDecl>(ND);
7132 Diag(SR.
getBegin(), diag::err_omp_interop_type_not_found) << SR;
7136 if (PTy->isVariadic()) {
7137 Diag(FD->getLocation(), diag::err_omp_append_args_with_varargs) << SR;
7141 Params.append(PTy->param_type_begin(), PTy->param_type_end());
7142 Params.insert(Params.end(), NumAppendArgs, InteropType);
7144 PTy->getExtProtoInfo());
7152 auto *Method = dyn_cast<CXXMethodDecl>(FD);
7153 if (Method && !Method->isStatic()) {
7154 const Type *ClassType =
7170 VariantRef = ER.
get();
7184 diag::err_omp_declare_variant_incompat_types)
7186 << ((Method && !Method->isStatic()) ? FnPtrType : FD->getType())
7196 if (Method && !Method->isStatic()) {
7197 Expr *PossibleAddrOfVariantRef = VariantRefCast.
get();
7198 if (
auto *UO = dyn_cast<UnaryOperator>(
7200 VariantRefCast = UO->getSubExpr();
7219 auto *NewFD = dyn_cast_or_null<FunctionDecl>(DRE->getDecl());
7226 if (FD->getCanonicalDecl() == NewFD->getCanonicalDecl()) {
7228 diag::err_omp_declare_variant_same_base_function)
7239 diag::err_omp_declare_variant_incompat_types)
7240 << NewFD->getType() << FD->getType() << (NumAppendArgs ? 1 : 0)
7245 if (FD->getType()->isFunctionNoProtoType())
7247 else if (NewFD->getType()->isFunctionNoProtoType())
7253 if (NewFD->hasAttrs() && NewFD->hasAttr<OMPDeclareVariantAttr>()) {
7255 diag::warn_omp_declare_variant_marked_as_declare_variant)
7258 NewFD->specific_attr_begin<OMPDeclareVariantAttr>()->getRange();
7259 Diag(SR.
getBegin(), diag::note_omp_marked_declare_variant_here) << SR;
7263 enum DoesntSupport {
7272 if (
const auto *CXXFD = dyn_cast<CXXMethodDecl>(FD)) {
7273 if (CXXFD->isVirtual()) {
7274 Diag(FD->getLocation(), diag::err_omp_declare_variant_doesnt_support)
7279 if (isa<CXXConstructorDecl>(FD)) {
7280 Diag(FD->getLocation(), diag::err_omp_declare_variant_doesnt_support)
7285 if (isa<CXXDestructorDecl>(FD)) {
7286 Diag(FD->getLocation(), diag::err_omp_declare_variant_doesnt_support)
7292 if (FD->isDeleted()) {
7293 Diag(FD->getLocation(), diag::err_omp_declare_variant_doesnt_support)
7298 if (FD->isDefaulted()) {
7299 Diag(FD->getLocation(), diag::err_omp_declare_variant_doesnt_support)
7304 if (FD->isConstexpr()) {
7305 Diag(FD->getLocation(), diag::err_omp_declare_variant_doesnt_support)
7306 << (NewFD->isConsteval() ? ConstevalFuncs : ConstexprFuncs);
7317 PDiag(diag::err_omp_declare_variant_doesnt_support)),
7319 PDiag(diag::err_omp_declare_variant_diff)
7320 << FD->getLocation()),
7324 return std::make_pair(FD, cast<Expr>(DRE));
7341 llvm::append_range(AllAdjustArgs, AdjustArgsNothing);
7342 llvm::append_range(AllAdjustArgs, AdjustArgsNeedDevicePtr);
7344 if (!AllAdjustArgs.empty() || !AppendArgs.empty()) {
7345 VariantMatchInfo VMI;
7347 if (!llvm::is_contained(
7348 VMI.ConstructTraits,
7349 llvm::omp::TraitProperty::construct_dispatch_dispatch)) {
7350 if (!AllAdjustArgs.empty())
7351 Diag(AdjustArgsLoc, diag::err_omp_clause_requires_dispatch_construct)
7352 << getOpenMPClauseName(OMPC_adjust_args);
7353 if (!AppendArgs.empty())
7354 Diag(AppendArgsLoc, diag::err_omp_clause_requires_dispatch_construct)
7355 << getOpenMPClauseName(OMPC_append_args);
7365 for (
Expr *E : AllAdjustArgs) {
7367 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E)) {
7368 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
7370 if (FD->
getNumParams() > PVD->getFunctionScopeIndex() &&
7374 if (!AdjustVars.insert(CanonPVD).second) {
7375 Diag(DRE->getLocation(), diag::err_omp_adjust_arg_multiple_clauses)
7384 Diag(E->
getExprLoc(), diag::err_omp_param_or_this_in_clause) << FD << 0;
7388 auto *NewAttr = OMPDeclareVariantAttr::CreateImplicit(
7389 Context, VariantRef, &TI,
const_cast<Expr **
>(AdjustArgsNothing.data()),
7390 AdjustArgsNothing.size(),
7391 const_cast<Expr **
>(AdjustArgsNeedDevicePtr.data()),
7392 AdjustArgsNeedDevicePtr.size(),
7393 const_cast<OMPDeclareVariantAttr::InteropType *
>(AppendArgs.data()),
7394 AppendArgs.size(), SR);
7405 auto *CS = cast<CapturedStmt>(AStmt);
7416 DSAStack->getTaskgroupReductionRef(),
7422 struct LoopIterationSpace final {
7425 bool IsStrictCompare =
false;
7427 Expr *PreCond =
nullptr;
7430 Expr *NumIterations =
nullptr;
7432 Expr *CounterVar =
nullptr;
7434 Expr *PrivateCounterVar =
nullptr;
7436 Expr *CounterInit =
nullptr;
7439 Expr *CounterStep =
nullptr;
7441 bool Subtract =
false;
7451 Expr *MinValue =
nullptr;
7455 Expr *MaxValue =
nullptr;
7457 bool IsNonRectangularLB =
false;
7459 bool IsNonRectangularUB =
false;
7462 unsigned LoopDependentIdx = 0;
7466 Expr *FinalCondition =
nullptr;
7472 class OpenMPIterationSpaceChecker {
7476 bool SupportsNonRectangular;
7492 Expr *LCRef =
nullptr;
7498 Expr *Step =
nullptr;
7507 bool TestIsStrictOp =
false;
7509 bool SubtractStep =
false;
7525 OpenMPIterationSpaceChecker(
Sema &SemaRef,
bool SupportsNonRectangular,
7527 : SemaRef(SemaRef), SupportsNonRectangular(SupportsNonRectangular),
7528 Stack(Stack), DefaultLoc(DefaultLoc), ConditionLoc(DefaultLoc) {}
7531 bool checkAndSetInit(
Stmt *S,
bool EmitDiags =
true);
7534 bool checkAndSetCond(
Expr *S);
7537 bool checkAndSetInc(
Expr *S);
7539 ValueDecl *getLoopDecl()
const {
return LCDecl; }
7541 Expr *getLoopDeclRefExpr()
const {
return LCRef; }
7543 SourceRange getInitSrcRange()
const {
return InitSrcRange; }
7545 SourceRange getConditionSrcRange()
const {
return ConditionSrcRange; }
7547 SourceRange getIncrementSrcRange()
const {
return IncrementSrcRange; }
7549 bool shouldSubtractStep()
const {
return SubtractStep; }
7551 bool isStrictTestOp()
const {
return TestIsStrictOp; }
7553 Expr *buildNumIterations(
7555 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
7559 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
7562 buildCounterVar(llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
7563 DSAStackTy &DSA)
const;
7566 Expr *buildPrivateCounterVar()
const;
7570 Expr *buildCounterStep()
const;
7574 buildOrderedLoopData(
Scope *S,
Expr *Counter,
7575 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
7579 std::pair<Expr *, Expr *> buildMinMaxValues(
7580 Scope *S, llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
7582 Expr *buildFinalCondition(
Scope *S)
const;
7584 bool dependent()
const;
7586 bool doesInitDependOnLC()
const {
return InitDependOnLC.hasValue(); }
7588 bool doesCondDependOnLC()
const {
return CondDependOnLC.hasValue(); }
7590 unsigned getLoopDependentIdx()
const {
7591 return InitDependOnLC.getValueOr(CondDependOnLC.getValueOr(0));
7597 bool checkAndSetIncRHS(
Expr *RHS);
7605 bool setStep(
Expr *NewStep,
bool Subtract);
7608 bool OpenMPIterationSpaceChecker::dependent()
const {
7610 assert(!LB && !UB && !Step);
7618 bool OpenMPIterationSpaceChecker::setLCDeclAndLB(
ValueDecl *NewLCDecl,
7620 Expr *NewLB,
bool EmitDiags) {
7622 assert(LCDecl ==
nullptr && LB ==
nullptr && LCRef ==
nullptr &&
7623 UB ==
nullptr && Step ==
nullptr && !TestIsLessOp && !TestIsStrictOp);
7627 LCRef = NewLCRefExpr;
7628 if (
auto *CE = dyn_cast_or_null<CXXConstructExpr>(NewLB))
7630 if ((Ctor->isCopyOrMoveConstructor() ||
7631 Ctor->isConvertingConstructor(
false)) &&
7632 CE->getNumArgs() > 0 && CE->getArg(0) !=
nullptr)
7636 InitDependOnLC = doesDependOnLoopCounter(LB,
true);
7640 bool OpenMPIterationSpaceChecker::setUB(
Expr *NewUB,
7645 assert(LCDecl !=
nullptr && LB !=
nullptr && UB ==
nullptr &&
7646 Step ==
nullptr && !TestIsLessOp && !TestIsStrictOp);
7651 TestIsLessOp = LessOp;
7652 TestIsStrictOp = StrictOp;
7653 ConditionSrcRange = SR;
7655 CondDependOnLC = doesDependOnLoopCounter(UB,
false);
7659 bool OpenMPIterationSpaceChecker::setStep(
Expr *NewStep,
bool Subtract) {
7661 assert(LCDecl !=
nullptr && LB !=
nullptr && Step ==
nullptr);
7671 NewStep = Val.
get();
7688 Result && Result->isSigned() && (Subtract != Result->isNegative());
7690 Result && Result->isSigned() && (Subtract == Result->isNegative());
7691 bool IsConstZero = Result && !Result->getBoolValue();
7694 if (!TestIsLessOp.hasValue())
7695 TestIsLessOp = IsConstPos || (IsUnsigned && !Subtract);
7697 (IsConstZero || (TestIsLessOp.getValue()
7698 ? (IsConstNeg || (IsUnsigned && Subtract))
7699 : (IsConstPos || (IsUnsigned && !Subtract))))) {
7701 diag::err_omp_loop_incr_not_compatible)
7702 << LCDecl << TestIsLessOp.getValue() << NewStep->
getSourceRange();
7703 SemaRef.
Diag(ConditionLoc,
7704 diag::note_omp_loop_cond_requres_compatible_incr)
7705 << TestIsLessOp.getValue() << ConditionSrcRange;
7708 if (TestIsLessOp.getValue() == Subtract) {
7712 Subtract = !Subtract;
7717 SubtractStep = Subtract;
7724 class LoopCounterRefChecker final
7731 bool IsInitializer =
true;
7732 bool SupportsNonRectangular;
7733 unsigned BaseLoopId = 0;
7736 SemaRef.Diag(E->
getExprLoc(), diag::err_omp_stmt_depends_on_loop_counter)
7737 << (IsInitializer ? 0 : 1);
7740 const auto &&Data = Stack.isLoopControlVariable(VD);
7746 llvm::raw_svector_ostream
OS(Name);
7750 diag::err_omp_wrong_dependency_iterator_type)
7752 SemaRef.Diag(VD->
getLocation(), diag::note_previous_decl) << VD;
7755 if (Data.first && !SupportsNonRectangular) {
7756 SemaRef.Diag(E->
getExprLoc(), diag::err_omp_invariant_dependency);
7760 (DepDecl || (PrevDepDecl &&
7762 if (!DepDecl && PrevDepDecl)
7763 DepDecl = PrevDepDecl;
7765 llvm::raw_svector_ostream
OS(Name);
7769 diag::err_omp_invariant_or_linear_dependency)
7775 BaseLoopId = Data.first;
7783 if (isa<VarDecl>(VD))
7784 return checkDecl(E, VD);
7790 if (isa<VarDecl>(VD) || isa<FieldDecl>(VD))
7791 return checkDecl(E, VD);
7795 bool VisitStmt(
const Stmt *S) {
7797 for (
const Stmt *Child : S->children())
7798 Res = (Child && Visit(Child)) || Res;
7801 explicit LoopCounterRefChecker(
Sema &SemaRef, DSAStackTy &Stack,
7802 const ValueDecl *CurLCDecl,
bool IsInitializer,
7804 bool SupportsNonRectangular =
true)
7805 : SemaRef(SemaRef), Stack(Stack), CurLCDecl(CurLCDecl),
7806 PrevDepDecl(PrevDepDecl), IsInitializer(IsInitializer),
7807 SupportsNonRectangular(SupportsNonRectangular) {}
7808 unsigned getBaseLoopId()
const {
7809 assert(CurLCDecl &&
"Expected loop dependency.");
7813 assert(CurLCDecl &&
"Expected loop dependency.");
7820 OpenMPIterationSpaceChecker::doesDependOnLoopCounter(
const Stmt *S,
7821 bool IsInitializer) {
7823 LoopCounterRefChecker LoopStmtChecker(SemaRef, Stack, LCDecl, IsInitializer,
7824 DepDecl, SupportsNonRectangular);
7825 if (LoopStmtChecker.Visit(S)) {
7826 DepDecl = LoopStmtChecker.getDepDecl();
7827 return LoopStmtChecker.getBaseLoopId();
7832 bool OpenMPIterationSpaceChecker::checkAndSetInit(
Stmt *S,
bool EmitDiags) {
7843 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_init);
7847 if (
auto *ExprTemp = dyn_cast<ExprWithCleanups>(S))
7848 if (!ExprTemp->cleanupsHaveSideEffects())
7849 S = ExprTemp->getSubExpr();
7851 InitSrcRange = S->getSourceRange();
7852 if (
Expr *E = dyn_cast<Expr>(S))
7854 if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
7855 if (BO->getOpcode() == BO_Assign) {
7857 if (
auto *DRE = dyn_cast<DeclRefExpr>(LHS)) {
7858 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(DRE->getDecl()))
7860 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
7862 return setLCDeclAndLB(DRE->getDecl(), DRE, BO->getRHS(), EmitDiags);
7864 if (
auto *ME = dyn_cast<MemberExpr>(LHS)) {
7865 if (ME->isArrow() &&
7866 isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
7867 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
7871 }
else if (
auto *DS = dyn_cast<DeclStmt>(S)) {
7872 if (DS->isSingleDecl()) {
7873 if (
auto *Var = dyn_cast_or_null<VarDecl>(DS->getSingleDecl())) {
7874 if (Var->hasInit() && !Var->getType()->isReferenceType()) {
7877 SemaRef.Diag(S->getBeginLoc(),
7878 diag::ext_omp_loop_not_canonical_init)
7879 << S->getSourceRange();
7880 return setLCDeclAndLB(
7883 Var->getType().getNonReferenceType(),
7885 Var->getInit(), EmitDiags);
7889 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
7890 if (CE->getOperator() == OO_Equal) {
7891 Expr *LHS = CE->getArg(0);
7892 if (
auto *DRE = dyn_cast<DeclRefExpr>(LHS)) {
7893 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(DRE->getDecl()))
7895 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
7897 return setLCDeclAndLB(DRE->getDecl(), DRE, CE->getArg(1), EmitDiags);
7899 if (
auto *ME = dyn_cast<MemberExpr>(LHS)) {
7900 if (ME->isArrow() &&
7901 isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
7902 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
7908 if (dependent() || SemaRef.CurContext->isDependentContext())
7911 SemaRef.Diag(S->getBeginLoc(), diag::err_omp_loop_not_canonical_init)
7912 << S->getSourceRange();
7923 if (
const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E))
7925 if ((Ctor->isCopyOrMoveConstructor() ||
7926 Ctor->isConvertingConstructor(
false)) &&
7927 CE->getNumArgs() > 0 && CE->getArg(0) !=
nullptr)
7929 if (
const auto *DRE = dyn_cast_or_null<DeclRefExpr>(E)) {
7930 if (
const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
7933 if (
const auto *ME = dyn_cast_or_null<MemberExpr>(E))
7934 if (ME->isArrow() && isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
7939 bool OpenMPIterationSpaceChecker::checkAndSetCond(
Expr *S) {
7946 bool IneqCondIsCanonical = SemaRef.getLangOpts().OpenMP >= 50;
7948 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_cond)
7949 << (IneqCondIsCanonical ? 1 : 0) << LCDecl;
7955 auto &&CheckAndSetCond = [
this, IneqCondIsCanonical](
7960 if (getInitLCDecl(LHS) == LCDecl)
7961 return setUB(
const_cast<Expr *
>(RHS),
7964 if (getInitLCDecl(RHS) == LCDecl)
7965 return setUB(
const_cast<Expr *
>(LHS),
7968 }
else if (IneqCondIsCanonical &&
Opcode == BO_NE) {
7969 return setUB(
const_cast<Expr *
>(getInitLCDecl(LHS) == LCDecl ? RHS : LHS),
7976 if (
auto *RBO = dyn_cast<CXXRewrittenBinaryOperator>(S)) {
7979 RBO->getOperatorLoc());
7980 }
else if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
7981 Res = CheckAndSetCond(BO->getOpcode(), BO->getLHS(), BO->getRHS(),
7982 BO->getSourceRange(), BO->getOperatorLoc());
7983 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
7984 if (CE->getNumArgs() == 2) {
7985 Res = CheckAndSetCond(
7987 CE->getArg(1), CE->getSourceRange(), CE->getOperatorLoc());
7992 if (dependent() || SemaRef.CurContext->isDependentContext())
7994 SemaRef.Diag(CondLoc, diag::err_omp_loop_not_canonical_cond)
7995 << (IneqCondIsCanonical ? 1 : 0) << S->getSourceRange() << LCDecl;
7999 bool OpenMPIterationSpaceChecker::checkAndSetIncRHS(
Expr *RHS) {
8006 if (
auto *BO = dyn_cast<BinaryOperator>(RHS)) {
8007 if (BO->isAdditiveOp()) {
8008 bool IsAdd = BO->getOpcode() == BO_Add;
8009 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8010 return setStep(BO->getRHS(), !IsAdd);
8011 if (IsAdd && getInitLCDecl(BO->getRHS()) == LCDecl)
8012 return setStep(BO->getLHS(),
false);
8014 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(RHS)) {
8015 bool IsAdd = CE->getOperator() == OO_Plus;
8016 if ((IsAdd || CE->getOperator() == OO_Minus) && CE->getNumArgs() == 2) {
8017 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8018 return setStep(CE->getArg(1), !IsAdd);
8019 if (IsAdd && getInitLCDecl(CE->getArg(1)) == LCDecl)
8020 return setStep(CE->getArg(0),
false);
8023 if (dependent() || SemaRef.CurContext->isDependentContext())
8025 SemaRef.Diag(RHS->
getBeginLoc(), diag::err_omp_loop_not_canonical_incr)
8030 bool OpenMPIterationSpaceChecker::checkAndSetInc(
Expr *S) {
8045 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_incr) << LCDecl;
8048 if (
auto *ExprTemp = dyn_cast<ExprWithCleanups>(S))
8049 if (!ExprTemp->cleanupsHaveSideEffects())
8050 S = ExprTemp->getSubExpr();
8052 IncrementSrcRange = S->getSourceRange();
8053 S = S->IgnoreParens();
8054 if (
auto *UO = dyn_cast<UnaryOperator>(S)) {
8055 if (UO->isIncrementDecrementOp() &&
8056 getInitLCDecl(UO->getSubExpr()) == LCDecl)
8057 return setStep(SemaRef
8058 .ActOnIntegerConstant(UO->getBeginLoc(),
8059 (UO->isDecrementOp() ? -1 : 1))
8062 }
else if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8063 switch (BO->getOpcode()) {
8066 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8067 return setStep(BO->getRHS(), BO->getOpcode() == BO_SubAssign);
8070 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8071 return checkAndSetIncRHS(BO->getRHS());
8076 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8077 switch (CE->getOperator()) {
8080 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8081 return setStep(SemaRef
8082 .ActOnIntegerConstant(
8084 ((CE->getOperator() == OO_MinusMinus) ? -1 : 1))
8090 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8091 return setStep(CE->getArg(1), CE->getOperator() == OO_MinusEqual);
8094 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8095 return checkAndSetIncRHS(CE->getArg(1));
8101 if (dependent() || SemaRef.CurContext->isDependentContext())
8103 SemaRef.Diag(S->getBeginLoc(), diag::err_omp_loop_not_canonical_incr)
8104 << S->getSourceRange() << LCDecl;
8110 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
8117 auto I = Captures.find(
Capture);
8118 if (I != Captures.end())
8131 bool TestIsStrictOp,
bool RoundToStep,
8132 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
8133 ExprResult NewStep = tryBuildCapture(SemaRef, Step, Captures);
8137 bool IsLowerConst =
false, IsStepConst =
false;
8141 IsLowerConst =
true;
8148 bool NoNeedToConvert = IsLowerConst && !RoundToStep &&
8149 ((!TestIsStrictOp && LRes.isNonNegative()) ||
8150 (TestIsStrictOp && LRes.isStrictlyPositive()));
8151 bool NeedToReorganize =
false;
8153 if (!NoNeedToConvert && IsLowerConst &&
8154 (TestIsStrictOp || (RoundToStep && IsStepConst))) {
8155 NoNeedToConvert =
true;
8157 unsigned BW = LRes.getBitWidth() > SRes.getBitWidth()
8158 ? LRes.getBitWidth()
8159 : SRes.getBitWidth();
8160 LRes = LRes.extend(BW + 1);
8161 LRes.setIsSigned(
true);
8162 SRes = SRes.extend(BW + 1);
8163 SRes.setIsSigned(
true);
8165 NoNeedToConvert = LRes.trunc(BW).extend(BW + 1) == LRes;
8166 LRes = LRes.trunc(BW);
8168 if (TestIsStrictOp) {
8169 unsigned BW = LRes.getBitWidth();
8170 LRes = LRes.extend(BW + 1);
8171 LRes.setIsSigned(
true);
8174 NoNeedToConvert && LRes.trunc(BW).extend(BW + 1) == LRes;
8176 LRes = LRes.trunc(BW);
8178 NeedToReorganize = NoNeedToConvert;
8181 bool IsUpperConst =
false;
8185 IsUpperConst =
true;
8187 if (NoNeedToConvert && IsLowerConst && IsUpperConst &&
8188 (!RoundToStep || IsStepConst)) {
8189 unsigned BW = LRes.getBitWidth() > URes.getBitWidth() ? LRes.getBitWidth()
8190 : URes.getBitWidth();
8191 LRes = LRes.extend(BW + 1);
8192 LRes.setIsSigned(
true);
8193 URes = URes.extend(BW + 1);
8194 URes.setIsSigned(
true);
8196 NoNeedToConvert = URes.trunc(BW).extend(BW + 1) == URes;
8197 NeedToReorganize = NoNeedToConvert;
8202 if ((!NoNeedToConvert || (LRes.isNegative() && !IsUpperConst)) &&
8208 if ((LowerSize <= UpperSize && UpperTy->hasSignedIntegerRepresentation()) ||
8211 LowerSize > UpperSize ? LowerSize : UpperSize, 0);
8222 if (!Lower || !Upper || NewStep.
isInvalid())
8228 if (NeedToReorganize) {
8242 S, DefaultLoc, BO_Add, Diff.
get(),
8252 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Sub, Upper, Diff.
get());
8256 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Sub, Upper, Lower);
8272 S, DefaultLoc, BO_Sub, Diff.
get(),
8292 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Div, Diff.
get(), NewStep.
get());
8300 Expr *OpenMPIterationSpaceChecker::buildNumIterations(
8302 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8311 if (InitDependOnLC) {
8312 const LoopIterationSpace &IS = ResultIterSpaces[*InitDependOnLC - 1];
8313 if (!IS.MinValue || !IS.MaxValue)
8322 IS.CounterVar, MinValue.
get());
8327 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, LBMinVal.
get(), LBVal);
8342 IS.CounterVar, MaxValue.
get());
8347 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, LBMaxVal.
get(), LBVal);
8355 Expr *LBMin = tryBuildCapture(SemaRef, LBMinVal.
get(), Captures).get();
8356 Expr *LBMax = tryBuildCapture(SemaRef, LBMaxVal.
get(), Captures).get();
8357 if (!LBMin || !LBMax)
8361 SemaRef.
BuildBinOp(S, DefaultLoc, BO_LT, LBMin, LBMax);
8365 tryBuildCapture(SemaRef, MinLessMaxRes.
get(), Captures).get();
8368 if (TestIsLessOp.getValue()) {
8372 MinLessMax, LBMin, LBMax);
8375 LBVal = MinLB.
get();
8380 MinLessMax, LBMax, LBMin);
8383 LBVal = MaxLB.
get();
8388 if (CondDependOnLC) {
8389 const LoopIterationSpace &IS = ResultIterSpaces[*CondDependOnLC - 1];
8390 if (!IS.MinValue || !IS.MaxValue)
8399 IS.CounterVar, MinValue.
get());
8404 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, UBMinVal.
get(), UBVal);
8419 IS.CounterVar, MaxValue.
get());
8424 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, UBMaxVal.
get(), UBVal);
8432 Expr *UBMin = tryBuildCapture(SemaRef, UBMinVal.
get(), Captures).get();
8433 Expr *UBMax = tryBuildCapture(SemaRef, UBMaxVal.
get(), Captures).get();
8434 if (!UBMin || !UBMax)
8438 SemaRef.
BuildBinOp(S, DefaultLoc, BO_GT, UBMin, UBMax);
8441 Expr *MinGreaterMax =
8442 tryBuildCapture(SemaRef, MinGreaterMaxRes.
get(), Captures).get();
8445 if (TestIsLessOp.getValue()) {
8449 DefaultLoc, DefaultLoc, MinGreaterMax, UBMin, UBMax);
8452 UBVal = MaxUB.
get();
8457 DefaultLoc, DefaultLoc, MinGreaterMax, UBMax, UBMin);
8460 UBVal = MinUB.
get();
8463 Expr *UBExpr = TestIsLessOp.getValue() ? UBVal : LBVal;
8464 Expr *LBExpr = TestIsLessOp.getValue() ? LBVal : UBVal;
8465 Expr *Upper = tryBuildCapture(SemaRef, UBExpr, Captures).get();
8466 Expr *Lower = tryBuildCapture(SemaRef, LBExpr, Captures).get();
8467 if (!Upper || !Lower)
8470 ExprResult Diff = calculateNumIters(SemaRef, S, DefaultLoc, Lower, Upper,
8471 Step, VarType, TestIsStrictOp,
8480 C.getTypeSize(
Type) >
C.getTypeSize(VarType);
8483 UseVarType ?
C.getTypeSize(VarType) :
C.getTypeSize(
Type);
8486 Type =
C.getIntTypeForBitwidth(NewSize, IsSigned);
8495 unsigned NewSize = (
C.getTypeSize(
Type) > 32) ? 64 : 32;
8496 if (NewSize !=
C.getTypeSize(
Type)) {
8497 if (NewSize <
C.getTypeSize(
Type)) {
8498 assert(NewSize == 64 &&
"incorrect loop var size");
8499 SemaRef.
Diag(DefaultLoc, diag::warn_omp_loop_64_bit_var)
8500 << InitSrcRange << ConditionSrcRange;
8502 QualType NewType =
C.getIntTypeForBitwidth(
8504 C.getTypeSize(
Type) < NewSize);
8517 std::pair<Expr *, Expr *> OpenMPIterationSpaceChecker::buildMinMaxValues(
8518 Scope *S, llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8522 return std::make_pair(
nullptr,
nullptr);
8525 Expr *MinExpr =
nullptr;
8526 Expr *MaxExpr =
nullptr;
8527 Expr *LBExpr = TestIsLessOp.getValue() ? LB : UB;
8528 Expr *UBExpr = TestIsLessOp.getValue() ? UB : LB;
8529 bool LBNonRect = TestIsLessOp.getValue() ? InitDependOnLC.hasValue()
8530 : CondDependOnLC.hasValue();
8531 bool UBNonRect = TestIsLessOp.getValue() ? CondDependOnLC.hasValue()
8532 : InitDependOnLC.hasValue();
8534 LBNonRect ? LBExpr : tryBuildCapture(SemaRef, LBExpr, Captures).get();
8536 UBNonRect ? UBExpr : tryBuildCapture(SemaRef, UBExpr, Captures).get();
8537 if (!Upper || !Lower)
8538 return std::make_pair(
nullptr,
nullptr);
8540 if (TestIsLessOp.getValue())
8548 ExprResult Diff = calculateNumIters(SemaRef, S, DefaultLoc, Lower, Upper,
8549 Step, VarType, TestIsStrictOp,
8552 return std::make_pair(
nullptr,
nullptr);
8558 return std::make_pair(
nullptr,
nullptr);
8560 ExprResult NewStep = tryBuildCapture(SemaRef, Step, Captures);
8562 return std::make_pair(
nullptr,
nullptr);
8563 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Mul, Diff.
get(), NewStep.
get());
8565 return std::make_pair(
nullptr,
nullptr);
8570 return std::make_pair(
nullptr,
nullptr);
8575 Diff.
get()->getType(),
8582 return std::make_pair(
nullptr,
nullptr);
8584 if (TestIsLessOp.getValue()) {
8588 S, DefaultLoc, BO_Add,
8592 return std::make_pair(
nullptr,
nullptr);
8597 S, DefaultLoc, BO_Sub,
8601 return std::make_pair(
nullptr,
nullptr);
8610 return std::make_pair(
nullptr,
nullptr);
8615 return std::make_pair(
nullptr,
nullptr);
8617 if (TestIsLessOp.getValue())
8618 MaxExpr = Diff.
get();
8620 MinExpr = Diff.
get();
8622 return std::make_pair(MinExpr, MaxExpr);
8625 Expr *OpenMPIterationSpaceChecker::buildFinalCondition(
Scope *S)
const {
8626 if (InitDependOnLC || CondDependOnLC)
8631 Expr *OpenMPIterationSpaceChecker::buildPreCond(
8633 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8638 if (CondDependOnLC || InitDependOnLC)
8649 ExprResult NewLB = tryBuildCapture(SemaRef, LB, Captures);
8650 ExprResult NewUB = tryBuildCapture(SemaRef, UB, Captures);
8656 TestIsLessOp.getValue() ? (TestIsStrictOp ? BO_LT : BO_LE)
8657 : (TestIsStrictOp ? BO_GT : BO_GE),
8658 NewLB.
get(), NewUB.
get());
8668 return CondExpr.
isUsable() ? CondExpr.
get() : Cond;
8672 DeclRefExpr *OpenMPIterationSpaceChecker::buildCounterVar(
8673 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
8674 DSAStackTy &DSA)
const {
8675 auto *VD = dyn_cast<VarDecl>(LCDecl);
8680 const DSAStackTy::DSAVarData Data =
8681 DSA.getTopDSA(LCDecl,
false);
8685 Captures.insert(std::make_pair(LCRef, Ref));
8688 return cast<DeclRefExpr>(LCRef);
8691 Expr *OpenMPIterationSpaceChecker::buildPrivateCounterVar()
const {
8697 isa<VarDecl>(LCDecl)
8711 Expr *OpenMPIterationSpaceChecker::buildCounterStep()
const {
return Step; }
8713 Expr *OpenMPIterationSpaceChecker::buildOrderedLoopData(
8715 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
SourceLocation Loc,
8721 assert((OOK == OO_Plus || OOK == OO_Minus) &&
8722 "Expected only + or - operations for depend clauses.");
8733 Expr *Upper = TestIsLessOp.getValue()
8735 : tryBuildCapture(SemaRef, LB, Captures).get();
8736 Expr *Lower = TestIsLessOp.getValue()
8737 ? tryBuildCapture(SemaRef, LB, Captures).get()
8739 if (!Upper || !Lower)
8743 SemaRef, S, DefaultLoc, Lower, Upper, Step, VarType,
8744 false,
false, Captures);
8753 assert(
getLangOpts().OpenMP &&
"OpenMP is not active.");
8754 assert(Init &&
"Expected loop in canonical form.");
8755 unsigned AssociatedLoops =
DSAStack->getAssociatedLoops();
8756 if (AssociatedLoops > 0 &&
8759 OpenMPIterationSpaceChecker ISC(*
this,
true,
8761 if (!ISC.checkAndSetInit(Init,
false)) {
8763 auto *VD = dyn_cast<VarDecl>(D);
8769 PrivateRef =
buildCapture(*
this, D, ISC.getLoopDeclRefExpr(),
8771 VD = cast<VarDecl>(PrivateRef->
getDecl());
8774 DSAStack->addLoopControlVariable(D, VD);
8777 DSAStack->resetPossibleLoopCounter();
8778 if (
auto *Var = dyn_cast_or_null<VarDecl>(LD))
8792 DSAStackTy::DSAVarData DVar =
8796 Expr *LoopDeclRefExpr = ISC.getLoopDeclRefExpr();
8799 ? (
DSAStack->hasMutipleLoops() ? OMPC_lastprivate : OMPC_linear)
8802 DVar.CKind != PredeterminedCKind && DVar.RefExpr &&
8803 (
LangOpts.OpenMP <= 45 || (DVar.CKind != OMPC_lastprivate &&
8804 DVar.CKind != OMPC_private))) ||
8806 DKind == OMPD_master_taskloop ||
8807 DKind == OMPD_parallel_master_taskloop ||
8810 DVar.CKind != OMPC_private && DVar.CKind != OMPC_lastprivate)) &&
8811 (DVar.CKind != OMPC_private || DVar.RefExpr)) {
8812 Diag(Init->getBeginLoc(), diag::err_omp_loop_var_dsa)
8813 << getOpenMPClauseName(DVar.CKind)
8814 << getOpenMPDirectiveName(DKind)
8815 << getOpenMPClauseName(PredeterminedCKind);
8816 if (DVar.RefExpr ==
nullptr)
8817 DVar.CKind = PredeterminedCKind;
8820 }
else if (LoopDeclRefExpr) {
8825 if (DVar.CKind == OMPC_unknown)
8826 DSAStack->addDSA(D, LoopDeclRefExpr, PredeterminedCKind,
8831 DSAStack->setAssociatedLoops(AssociatedLoops - 1);
8839 unsigned CurrentNestedLoopCount,
unsigned NestedLoopCount,
8840 unsigned TotalNestedLoopCount,
Expr *CollapseLoopCountExpr,
8841 Expr *OrderedLoopCountExpr,
8844 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
8849 if (
auto *CanonLoop = dyn_cast_or_null<OMPCanonicalLoop>(S))
8850 S = CanonLoop->getLoopStmt();
8851 auto *For = dyn_cast_or_null<ForStmt>(S);
8852 auto *CXXFor = dyn_cast_or_null<CXXForRangeStmt>(S);
8854 if (!For && (SemaRef.
LangOpts.OpenMP <= 45 || !CXXFor)) {
8855 SemaRef.
Diag(S->getBeginLoc(), diag::err_omp_not_for)
8856 << (CollapseLoopCountExpr !=
nullptr || OrderedLoopCountExpr !=
nullptr)
8857 << getOpenMPDirectiveName(DKind) << TotalNestedLoopCount
8858 << (CurrentNestedLoopCount > 0) << CurrentNestedLoopCount;
8859 if (TotalNestedLoopCount > 1) {
8860 if (CollapseLoopCountExpr && OrderedLoopCountExpr)
8861 SemaRef.
Diag(DSA.getConstructLoc(),
8862 diag::note_omp_collapse_ordered_expr)
8865 else if (CollapseLoopCountExpr)
8867 diag::note_omp_collapse_ordered_expr)
8871 diag::note_omp_collapse_ordered_expr)
8876 assert(((For && For->getBody()) || (CXXFor && CXXFor->getBody())) &&
8882 OpenMPIterationSpaceChecker ISC(SemaRef, SupportsNonRectangular, DSA,
8883 For ? For->getForLoc() : CXXFor->getForLoc());
8886 Stmt *Init = For ? For->getInit() : CXXFor->getBeginStmt();
8887 if (ISC.checkAndSetInit(Init))
8890 bool HasErrors =
false;
8893 if (
ValueDecl *LCDecl = ISC.getLoopDecl()) {
8903 SemaRef.
Diag(Init->getBeginLoc(), diag::err_omp_loop_variable_type)
8917 VarsWithImplicitDSA.erase(LCDecl);
8922 HasErrors |= ISC.checkAndSetCond(For ? For->getCond() : CXXFor->getCond());
8925 HasErrors |= ISC.checkAndSetInc(For ? For->getInc() : CXXFor->getInc());
8932 ResultIterSpaces[CurrentNestedLoopCount].PreCond = ISC.buildPreCond(
8933 DSA.getCurScope(), For ? For->getCond() : CXXFor->getCond(), Captures);
8934 ResultIterSpaces[CurrentNestedLoopCount].NumIterations =
8935 ISC.buildNumIterations(DSA.getCurScope(), ResultIterSpaces,
8942 ResultIterSpaces[CurrentNestedLoopCount].CounterVar =
8943 ISC.buildCounterVar(Captures, DSA);
8944 ResultIterSpaces[CurrentNestedLoopCount].PrivateCounterVar =
8945 ISC.buildPrivateCounterVar();
8946 ResultIterSpaces[CurrentNestedLoopCount].CounterInit = ISC.buildCounterInit();
8947 ResultIterSpaces[CurrentNestedLoopCount].CounterStep = ISC.buildCounterStep();
8948 ResultIterSpaces[CurrentNestedLoopCount].InitSrcRange = ISC.getInitSrcRange();
8949 ResultIterSpaces[CurrentNestedLoopCount].CondSrcRange =
8950 ISC.getConditionSrcRange();
8951 ResultIterSpaces[CurrentNestedLoopCount].IncSrcRange =
8952 ISC.getIncrementSrcRange();
8953 ResultIterSpaces[CurrentNestedLoopCount].Subtract = ISC.shouldSubtractStep();
8954 ResultIterSpaces[CurrentNestedLoopCount].IsStrictCompare =
8955 ISC.isStrictTestOp();
8956 std::tie(ResultIterSpaces[CurrentNestedLoopCount].MinValue,
8957 ResultIterSpaces[CurrentNestedLoopCount].MaxValue) =
8958 ISC.buildMinMaxValues(DSA.getCurScope(), Captures);
8959 ResultIterSpaces[CurrentNestedLoopCount].FinalCondition =
8960 ISC.buildFinalCondition(DSA.getCurScope());
8961 ResultIterSpaces[CurrentNestedLoopCount].IsNonRectangularLB =
8962 ISC.doesInitDependOnLC();
8963 ResultIterSpaces[CurrentNestedLoopCount].IsNonRectangularUB =
8964 ISC.doesCondDependOnLC();
8965 ResultIterSpaces[CurrentNestedLoopCount].LoopDependentIdx =
8966 ISC.getLoopDependentIdx();
8969 (ResultIterSpaces[CurrentNestedLoopCount].PreCond ==
nullptr ||
8970 ResultIterSpaces[CurrentNestedLoopCount].NumIterations ==
nullptr ||
8971 ResultIterSpaces[CurrentNestedLoopCount].CounterVar ==
nullptr ||
8972 ResultIterSpaces[CurrentNestedLoopCount].PrivateCounterVar ==
nullptr ||
8973 ResultIterSpaces[CurrentNestedLoopCount].CounterInit ==
nullptr ||
8974 ResultIterSpaces[CurrentNestedLoopCount].CounterStep ==
nullptr);
8975 if (!HasErrors && DSA.isOrderedRegion()) {
8976 if (DSA.getOrderedRegionParam().second->getNumForLoops()) {
8977 if (CurrentNestedLoopCount <
8978 DSA.getOrderedRegionParam().second->getLoopNumIterations().size()) {
8979 DSA.getOrderedRegionParam().second->setLoopNumIterations(
8980 CurrentNestedLoopCount,
8981 ResultIterSpaces[CurrentNestedLoopCount].NumIterations);
8982 DSA.getOrderedRegionParam().second->setLoopCounter(
8983 CurrentNestedLoopCount,
8984 ResultIterSpaces[CurrentNestedLoopCount].CounterVar);
8987 for (
auto &Pair : DSA.getDoacrossDependClauses()) {
8988 if (CurrentNestedLoopCount >= Pair.first->getNumLoops()) {
8992 if (Pair.first->getDependencyKind() == OMPC_DEPEND_sink &&
8993 Pair.second.size() <= CurrentNestedLoopCount) {
8995 Pair.first->setLoopData(CurrentNestedLoopCount,
nullptr);
8999 if (Pair.first->getDependencyKind() == OMPC_DEPEND_source)
9000 CntValue = ISC.buildOrderedLoopData(
9002 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9003 Pair.first->getDependencyLoc());
9005 CntValue = ISC.buildOrderedLoopData(
9007 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9008 Pair.first->getDependencyLoc(),
9009 Pair.second[CurrentNestedLoopCount].first,
9010 Pair.second[CurrentNestedLoopCount].second);
9011 Pair.first->setLoopData(CurrentNestedLoopCount, CntValue);
9022 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9026 : tryBuildCapture(SemaRef, Start.
get(), Captures);
9030 VarRef.
get()->getType())) {
9047 bool IsNonRectangularLB,
9048 llvm::MapVector<const Expr *, DeclRefExpr *> *Captures =
nullptr) {
9057 NewStep = tryBuildCapture(SemaRef, Step.
get(), *Captures);
9072 if (Captures && !IsNonRectangularLB)
9073 NewStart = tryBuildCapture(SemaRef, Start.
get(), *Captures);
9080 if (VarRef.
get()->getType()->isOverloadableType() ||
9081 NewStart.
get()->getType()->isOverloadableType() ||
9082 Update.get()->getType()->isOverloadableType()) {
9089 SemaRef.
BuildBinOp(S, Loc, Subtract ? BO_SubAssign : BO_AddAssign,
9090 VarRef.
get(), SavedUpdate.
get());
9101 NewStart.
get(), SavedUpdate.
get());
9106 VarRef.
get()->getType())) {
9125 unsigned HasBits = C.getTypeSize(OldType);
9126 if (HasBits >= Bits)
9129 QualType NewType = C.getIntTypeForBitwidth(Bits,
true);
9141 return Signed ? Result->isSignedIntN(Bits) : Result->isIntN(Bits);
9148 if (!PreInits.empty()) {
9159 const llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9160 if (!Captures.empty()) {
9162 for (
const auto &Pair : Captures)
9163 PreInits.push_back(Pair.second->getDecl());
9171 Expr *PostUpdate =
nullptr;
9172 if (!PostUpdates.empty()) {
9173 for (
Expr *E : PostUpdates) {
9179 PostUpdate = PostUpdate
9194 Expr *OrderedLoopCountExpr,
Stmt *AStmt,
Sema &SemaRef,
9198 unsigned NestedLoopCount = 1;
9199 bool SupportsNonPerfectlyNested = (SemaRef.
LangOpts.OpenMP >= 50) &&
9202 if (CollapseLoopCountExpr) {
9207 NestedLoopCount = Result.Val.getInt().getLimitedValue();
9213 unsigned OrderedLoopCount = 1;
9214 if (OrderedLoopCountExpr) {
9221 if (Result.getLimitedValue() < NestedLoopCount) {
9223 diag::err_omp_wrong_ordered_loop_count)
9226 diag::note_collapse_loop_count)
9229 OrderedLoopCount = Result.getLimitedValue();
9237 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
9238 unsigned NumLoops =
std::max(OrderedLoopCount, NestedLoopCount);
9242 SupportsNonPerfectlyNested, NumLoops,
9243 [DKind, &SemaRef, &DSA, NumLoops, NestedLoopCount,
9244 CollapseLoopCountExpr, OrderedLoopCountExpr, &VarsWithImplicitDSA,
9245 &IterSpaces, &Captures](
unsigned Cnt,
Stmt *CurStmt) {
9247 DKind, CurStmt, SemaRef, DSA, Cnt, NestedLoopCount,
9248 NumLoops, CollapseLoopCountExpr, OrderedLoopCountExpr,
9249 VarsWithImplicitDSA, IterSpaces, Captures))
9251 if (Cnt > 0 && Cnt >= NestedLoopCount &&
9252 IterSpaces[Cnt].CounterVar) {
9254 auto *DRE = cast<DeclRefExpr>(IterSpaces[Cnt].CounterVar);
9255 if (isa<OMPCapturedExprDecl>(DRE->getDecl())) {
9256 Captures[DRE] = DRE;
9262 Stmt *DependentPreInits = Transform->getPreInits();
9263 if (!DependentPreInits)
9265 for (
Decl *C : cast<DeclStmt>(DependentPreInits)->getDeclGroup()) {
9266 auto *D = cast<VarDecl>(C);
9268 Transform->getBeginLoc());
9269 Captures[Ref] = Ref;
9274 Built.
clear( NestedLoopCount);
9277 return NestedLoopCount;
9310 auto PreCond =
ExprResult(IterSpaces[0].PreCond);
9311 Expr *N0 = IterSpaces[0].NumIterations;
9315 .PerformImplicitConversion(
9330 return NestedLoopCount;
9333 bool AllCountsNeedLessThan32Bits = C.getTypeSize(N0->
getType()) < 32;
9335 Scope *CurScope = DSA.getCurScope();
9336 for (
unsigned Cnt = 1; Cnt < NestedLoopCount; ++Cnt) {
9337 if (PreCond.isUsable()) {
9339 SemaRef.
BuildBinOp(CurScope, PreCond.get()->getExprLoc(), BO_LAnd,
9340 PreCond.get(), IterSpaces[Cnt].PreCond);
9342 Expr *N = IterSpaces[Cnt].NumIterations;
9344 AllCountsNeedLessThan32Bits &= C.getTypeSize(N->
getType()) < 32;
9347 CurScope, Loc, BO_Mul, LastIteration32.
get(),
9355 CurScope, Loc, BO_Mul, LastIteration64.
get(),
9365 if (SemaRef.
getLangOpts().OpenMPOptimisticCollapse ||
9367 C.getTypeSize(LastIteration32.
get()->getType()) == 32 &&
9368 (AllCountsNeedLessThan32Bits || NestedLoopCount == 1 ||
9371 LastIteration32.
get()->getType()->hasSignedIntegerRepresentation(),
9372 LastIteration64.
get(), SemaRef))))
9373 LastIteration = LastIteration32;
9391 CurScope, LastIteration.
get()->getExprLoc(), BO_Sub,
9392 LastIteration.
get(),
9400 bool IsConstant = LastIteration.
get()->isIntegerConstantExpr(SemaRef.
Context);
9404 tryBuildCapture(SemaRef, LastIteration.
get(), Captures);
9405 LastIteration = SaveRef;
9409 CurScope, SaveRef.
get()->getExprLoc(), BO_Add, SaveRef.
get(),
9418 ExprResult LB, UB, IL, ST, EUB, CombLB, CombUB, PrevLB, PrevUB, CombEUB;
9447 buildVarDecl(SemaRef, InitLoc, StrideVType,
".omp.stride");
9456 UB.
get(), LastIteration.
get());
9458 LastIteration.
get()->getExprLoc(), InitLoc, IsUBGreater.
get(),
9459 LastIteration.
get(), UB.
get());
9460 EUB = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, UB.
get(),
9485 CurScope, InitLoc, BO_GT, CombUB.
get(), LastIteration.
get());
9488 LastIteration.
get(), CombUB.
get());
9489 CombEUB = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, CombUB.
get(),
9494 const CapturedDecl *CD = cast<CapturedStmt>(AStmt)->getCapturedDecl();
9498 "Unexpected number of parameters in loop combined directive");
9527 Init = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, IV.
get(), RHS);
9539 SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, IV.
get(), CombRHS);
9545 bool UseStrictCompare =
9547 llvm::all_of(IterSpaces, [](
const LoopIterationSpace &LIS) {
9548 return LIS.IsStrictCompare;
9554 if (UseStrictCompare) {
9557 .
BuildBinOp(CurScope, CondLoc, BO_Add, BoundUB,
9569 UseStrictCompare ? BO_LT : BO_LE, IV.
get(),
9572 NumIterations.
get());
9575 CombDistCond = SemaRef.
BuildBinOp(CurScope, CondLoc, BO_LT, IV.
get(),
9576 NumIterations.
get());
9581 Expr *BoundCombUB = CombUB.
get();
9582 if (UseStrictCompare) {
9586 CurScope, CondLoc, BO_Add, BoundCombUB,
9594 SemaRef.
BuildBinOp(CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE,
9595 IV.
get(), BoundCombUB);
9604 Inc = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, IV.
get(), Inc.
get());
9613 ExprResult NextLB, NextUB, CombNextLB, CombNextUB;
9646 CombNextLB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, CombLB.
get(),
9658 CombNextUB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, CombUB.
get(),
9672 ExprResult DistCond, DistInc, PrevEUB, ParForInDistCond;
9675 CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE, IV.
get(), BoundUB);
9676 assert(DistCond.
isUsable() &&
"distribute cond expr was not built");
9680 assert(DistInc.
isUsable() &&
"distribute inc expr was not built");
9681 DistInc = SemaRef.
BuildBinOp(CurScope, DistIncLoc, BO_Assign, IV.
get(),
9685 assert(DistInc.
isUsable() &&
"distribute inc expr was not built");
9692 PrevUB.
get()->getType())) {
9696 DistEUBLoc, NewPrevUB.
get());
9701 UB.
get(), NewPrevUB.
get());
9703 DistEUBLoc, DistEUBLoc, IsUBGreater.
get(), NewPrevUB.
get(), UB.
get());
9704 PrevEUB = SemaRef.
BuildBinOp(CurScope, DistIncLoc, BO_Assign, UB.
get(),
9712 Expr *BoundPrevUB = PrevUB.
get();
9713 if (UseStrictCompare) {
9717 CurScope, CondLoc, BO_Add, BoundPrevUB,
9725 SemaRef.
BuildBinOp(CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE,
9726 IV.
get(), BoundPrevUB);
9730 bool HasErrors =
false;
9731 Built.
Counters.resize(NestedLoopCount);
9732 Built.
Inits.resize(NestedLoopCount);
9733 Built.
Updates.resize(NestedLoopCount);
9734 Built.
Finals.resize(NestedLoopCount);
9755 for (
unsigned int Cnt = 0; Cnt < NestedLoopCount; ++Cnt) {
9756 LoopIterationSpace &IS = IterSpaces[Cnt];
9762 for (
unsigned int K = Cnt + 1; K < NestedLoopCount; ++K)
9763 Prod = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Mul, Prod.
get(),
9764 IterSpaces[K].NumIterations);
9769 if (Cnt + 1 < NestedLoopCount)
9783 if (Cnt + 1 < NestedLoopCount)
9784 Prod = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Mul, Iter.
get(),
9791 auto *VD = cast<VarDecl>(cast<DeclRefExpr>(IS.CounterVar)->getDecl());
9793 SemaRef, VD, IS.CounterVar->getType(), IS.CounterVar->getExprLoc(),
9797 IS.CounterInit, IS.IsNonRectangularLB, Captures);
9798 if (!Init.isUsable()) {
9803 SemaRef, CurScope, UpdLoc, CounterVar, IS.CounterInit, Iter,
9804 IS.CounterStep, IS.Subtract, IS.IsNonRectangularLB, &Captures);
9805 if (!
Update.isUsable()) {
9813 IS.CounterInit, IS.NumIterations, IS.CounterStep,
9814 IS.Subtract, IS.IsNonRectangularLB, &Captures);
9815 if (!Final.isUsable()) {
9820 if (!
Update.isUsable() || !Final.isUsable()) {
9825 Built.
Counters[Cnt] = IS.CounterVar;
9827 Built.
Inits[Cnt] = Init.get();
9829 Built.
Finals[Cnt] = Final.get();
9833 if (IS.IsNonRectangularLB || IS.IsNonRectangularUB) {
9835 Built.
Counters[NestedLoopCount - 1 - IS.LoopDependentIdx];
9837 Built.
Inits[NestedLoopCount - 1 - IS.LoopDependentIdx];
9854 Built.
PreCond = PreCond.get();
9857 Built.
Init = Init.get();
9859 Built.
LB = LB.
get();
9860 Built.
UB = UB.
get();
9861 Built.
IL = IL.
get();
9862 Built.
ST = ST.
get();
9864 Built.
NLB = NextLB.
get();
9865 Built.
NUB = NextUB.
get();
9880 return NestedLoopCount;
9884 auto CollapseClauses =
9885 OMPExecutableDirective::getClausesOfKind<OMPCollapseClause>(Clauses);
9886 if (CollapseClauses.begin() != CollapseClauses.end())
9887 return (*CollapseClauses.begin())->getNumForLoops();
9892 auto OrderedClauses =
9893 OMPExecutableDirective::getClausesOfKind<OMPOrderedClause>(Clauses);
9894 if (OrderedClauses.begin() != OrderedClauses.end())
9895 return (*OrderedClauses.begin())->getNumForLoops();
9904 for (
const OMPClause *Clause : Clauses) {
9906 Safelen = cast<OMPSafelenClause>(Clause);
9908 Simdlen = cast<OMPSimdlenClause>(Clause);
9909 if (Safelen && Simdlen)
9913 if (Simdlen && Safelen) {
9933 if (SimdlenRes > SafelenRes) {
9935 diag::err_omp_wrong_simdlen_safelen_values)
9950 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
9956 AStmt, *
this, *
DSAStack, VarsWithImplicitDSA, B);
9957 if (NestedLoopCount == 0)
9961 "omp simd loop exprs were not built");
9966 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
9989 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
9995 AStmt, *
this, *
DSAStack, VarsWithImplicitDSA, B);
9996 if (NestedLoopCount == 0)
10000 "omp for loop exprs were not built");
10005 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10015 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
10025 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10029 unsigned NestedLoopCount =
10032 VarsWithImplicitDSA, B);
10033 if (NestedLoopCount == 0)
10037 "omp for simd loop exprs were not built");
10042 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10055 Clauses, AStmt, B);
10065 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10066 auto BaseStmt = AStmt;
10067 while (
auto *CS = dyn_cast_or_null<CapturedStmt>(BaseStmt))
10069 if (
auto *C = dyn_cast_or_null<CompoundStmt>(BaseStmt)) {
10070 auto S = C->children();
10071 if (S.begin() == S.end())
10075 for (
Stmt *SectionStmt : llvm::drop_begin(S)) {
10076 if (!SectionStmt || !isa<OMPSectionDirective>(SectionStmt)) {
10078 Diag(SectionStmt->getBeginLoc(),
10079 diag::err_omp_sections_substmt_not_section);
10082 cast<OMPSectionDirective>(SectionStmt)
10083 ->setHasCancel(
DSAStack->isCancelRegion());
10086 Diag(AStmt->
getBeginLoc(), diag::err_omp_sections_not_compound_stmt);
10093 DSAStack->getTaskgroupReductionRef(),
10112 if (
auto *CE = dyn_cast<CallExpr>(E))
10113 if (CE->getDirectCallee())
10125 Stmt *S = cast<CapturedStmt>(AStmt)->getCapturedStmt();
10135 Expr *TargetCall =
nullptr;
10137 auto *E = dyn_cast<Expr>(S);
10139 Diag(S->getBeginLoc(), diag::err_omp_dispatch_statement_call);
10145 if (
auto *BO = dyn_cast<BinaryOperator>(E)) {
10146 if (BO->getOpcode() == BO_Assign)
10149 if (
auto *COCE = dyn_cast<CXXOperatorCallExpr>(E))
10150 if (COCE->getOperator() == OO_Equal)
10170 DSAStackTy *Stack) {
10171 bool ErrorFound =
false;
10173 if (
auto *LPC = dyn_cast<OMPLastprivateClause>(C)) {
10174 for (
Expr *RefExpr : LPC->varlists()) {
10177 Expr *SimpleRefExpr = RefExpr;
10180 auto &&Info = Stack->isLoopControlVariable(D);
10182 S.
Diag(ELoc, diag::err_omp_lastprivate_loop_var_non_loop_iteration)
10183 << getOpenMPDirectiveName(K);
10205 auto *CS = cast<CapturedStmt>(AStmt);
10217 AStmt, *
this, *
DSAStack, VarsWithImplicitDSA, B);
10218 if (NestedLoopCount == 0)
10222 "omp loop exprs were not built");
10226 NestedLoopCount, Clauses, AStmt, B);
10241 auto *CS = cast<CapturedStmt>(AStmt);
10249 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10261 unsigned NestedLoopCount =
10264 VarsWithImplicitDSA, B);
10265 if (NestedLoopCount == 0)
10269 "omp loop exprs were not built");
10272 DSAStack->setParentTeamsRegionLoc(StartLoc);
10275 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10291 auto *CS = cast<CapturedStmt>(AStmt);
10299 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10311 unsigned NestedLoopCount =
10314 VarsWithImplicitDSA, B);
10315 if (NestedLoopCount == 0)
10319 "omp loop exprs were not built");
10324 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10339 auto *CS = cast<CapturedStmt>(AStmt);
10347 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10359 unsigned NestedLoopCount =
10362 VarsWithImplicitDSA, B);
10363 if (NestedLoopCount == 0)
10367 "omp loop exprs were not built");
10372 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10388 auto *CS = cast<CapturedStmt>(AStmt);
10396 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10408 unsigned NestedLoopCount =
10411 VarsWithImplicitDSA, B);
10412 if (NestedLoopCount == 0)
10416 "omp loop exprs were not built");
10421 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10431 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10438 const OMPClause *Copyprivate =
nullptr;
10439 for (
const OMPClause *Clause : Clauses) {
10443 Copyprivate = Clause;
10444 if (Copyprivate && Nowait) {
10446 diag::err_omp_single_copyprivate_with_nowait);
10484 bool ErrorFound =
false;
10487 bool DependentHint =
false;
10489 if (C->getClauseKind() == OMPC_hint) {
10491 Diag(C->getBeginLoc(), diag::err_omp_hint_clause_no_name);
10494 Expr *E = cast<OMPHintClause>(C)->getHint();
10497 DependentHint =
true;
10500 HintLoc = C->getBeginLoc();
10506 const auto Pair =
DSAStack->getCriticalWithHint(DirName);
10507 if (Pair.first && DirName.
getName() && !DependentHint) {
10508 if (llvm::APSInt::compareValues(Hint, Pair.second) != 0) {
10509 Diag(StartLoc, diag::err_omp_critical_with_hint);
10511 Diag(HintLoc, diag::note_omp_critical_hint_here)
10512 << 0 <<
toString(Hint, 10,
false);
10514 Diag(StartLoc, diag::note_omp_critical_no_hint) << 0;
10515 if (
const auto *C = Pair.first->getSingleClause<
OMPHintClause>()) {
10516 Diag(C->getBeginLoc(), diag::note_omp_critical_hint_here)
10521 Diag(Pair.first->getBeginLoc(), diag::note_omp_critical_no_hint) << 1;
10530 if (!Pair.first && DirName.
getName() && !DependentHint)
10531 DSAStack->addCriticalWithHint(Dir, Hint);
10541 auto *CS = cast<CapturedStmt>(AStmt);
10552 unsigned NestedLoopCount =
10555 VarsWithImplicitDSA, B);
10556 if (NestedLoopCount == 0)
10560 "omp parallel for loop exprs were not built");
10565 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10575 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
10585 auto *CS = cast<CapturedStmt>(AStmt);
10596 unsigned NestedLoopCount =
10599 VarsWithImplicitDSA, B);
10600 if (NestedLoopCount == 0)
10606 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10619 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10629 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10630 auto *CS = cast<CapturedStmt>(AStmt);
10641 Context, StartLoc, EndLoc, Clauses, AStmt,
10642 DSAStack->getTaskgroupReductionRef());
10652 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10653 auto BaseStmt = AStmt;
10654 while (
auto *CS = dyn_cast_or_null<CapturedStmt>(BaseStmt))
10656 if (
auto *C = dyn_cast_or_null<CompoundStmt>(BaseStmt)) {
10657 auto S = C->children();
10658 if (S.begin() == S.end())
10662 for (
Stmt *SectionStmt : llvm::drop_begin(S)) {
10663 if (!SectionStmt || !isa<OMPSectionDirective>(SectionStmt)) {
10665 Diag(SectionStmt->getBeginLoc(),
10666 diag::err_omp_parallel_sections_substmt_not_section);
10669 cast<OMPSectionDirective>(SectionStmt)
10670 ->setHasCancel(
DSAStack->isCancelRegion());
10674 diag::err_omp_parallel_sections_not_compound_stmt);
10681 Context, StartLoc, EndLoc, Clauses, AStmt,
10690 bool ErrorFound =
false;
10692 if (llvm::is_contained(MutuallyExclusiveClauses, C->getClauseKind())) {
10695 }
else if (PrevClause->
getClauseKind() != C->getClauseKind()) {
10696 S.
Diag(C->getBeginLoc(), diag::err_omp_clauses_mutually_exclusive)
10697 << getOpenMPClauseName(C->getClauseKind())
10718 {OMPC_detach, OMPC_mergeable}))
10721 auto *CS = cast<CapturedStmt>(AStmt);
10758 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10764 DSAStack->getTaskgroupReductionRef());
10773 if (C->getClauseKind() == OMPC_flush)
10774 FC = cast<OMPFlushClause>(C);
10781 if (C->getClauseKind() == OMPC_acq_rel ||
10782 C->getClauseKind() == OMPC_acquire ||
10783 C->getClauseKind() == OMPC_release) {
10784 if (MemOrderKind != OMPC_unknown) {
10785 Diag(C->getBeginLoc(), diag::err_omp_several_mem_order_clauses)
10786 << getOpenMPDirectiveName(OMPD_flush) << 1
10787 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
10788 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
10789 << getOpenMPClauseName(MemOrderKind);
10791 MemOrderKind = C->getClauseKind();
10792 MemOrderLoc = C->getBeginLoc();
10796 if (FC && OrderClause) {
10799 Diag(OrderClause->
getBeginLoc(), diag::note_omp_flush_order_clause_here)
10809 if (Clauses.empty()) {
10810 Diag(StartLoc, diag::err_omp_depobj_expected);
10812 }
else if (Clauses[0]->getClauseKind() != OMPC_depobj) {
10813 Diag(Clauses[0]->getBeginLoc(), diag::err_omp_depobj_expected);
10817 if (Clauses.size() > 2) {
10818 Diag(Clauses[2]->getBeginLoc(),
10819 diag::err_omp_depobj_single_clause_expected);
10821 }
else if (Clauses.size() < 1) {
10822 Diag(Clauses[0]->getEndLoc(), diag::err_omp_depobj_single_clause_expected);
10832 if (Clauses.size() != 1) {
10833 Diag(Clauses.empty() ? EndLoc : Clauses[1]->getBeginLoc(),
10834 diag::err_omp_scan_single_clause_expected);
10839 Scope *ParentS = S->getParent();
10842 return StmtError(
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
10843 << getOpenMPDirectiveName(OMPD_scan) << 5);
10847 if (
DSAStack->doesParentHasScanDirective()) {
10848 Diag(StartLoc, diag::err_omp_several_directives_in_region) <<
"scan";
10850 diag::note_omp_previous_directive)
10854 DSAStack->setParentHasScanDirective(StartLoc);
10862 const OMPClause *DependFound =
nullptr;
10863 const OMPClause *DependSourceClause =
nullptr;
10864 const OMPClause *DependSinkClause =
nullptr;
10865 bool ErrorFound =
false;
10869 if (
auto *DC = dyn_cast<OMPDependClause>(C)) {
10871 if (DC->getDependencyKind() == OMPC_DEPEND_source) {
10872 if (DependSourceClause) {
10873 Diag(C->getBeginLoc(), diag::err_omp_more_one_clause)
10874 << getOpenMPDirectiveName(OMPD_ordered)
10875 << getOpenMPClauseName(OMPC_depend) << 2;
10878 DependSourceClause = C;
10880 if (DependSinkClause) {
10881 Diag(C->getBeginLoc(), diag::err_omp_depend_sink_source_not_allowed)
10885 }
else if (DC->getDependencyKind() == OMPC_DEPEND_sink) {
10886 if (DependSourceClause) {
10887 Diag(C->getBeginLoc(), diag::err_omp_depend_sink_source_not_allowed)
10891 DependSinkClause = C;
10893 }
else if (C->getClauseKind() == OMPC_threads) {
10894 TC = cast<OMPThreadsClause>(C);
10895 }
else if (C->getClauseKind() == OMPC_simd) {
10896 SC = cast<OMPSIMDClause>(C);
10899 if (!ErrorFound && !SC &&
10904 Diag(StartLoc, diag::err_omp_prohibited_region_simd)
10905 << (
LangOpts.OpenMP >= 50 ? 1 : 0);
10907 }
else if (DependFound && (TC || SC)) {
10908 Diag(DependFound->
getBeginLoc(), diag::err_omp_depend_clause_thread_simd)
10911 }
else if (DependFound && !
DSAStack->getParentOrderedRegionParam().first) {
10913 diag::err_omp_ordered_directive_without_param);
10915 }
else if (TC || Clauses.empty()) {
10916 if (
const Expr *Param =
DSAStack->getParentOrderedRegionParam().first) {
10918 Diag(ErrLoc, diag::err_omp_ordered_directive_with_param)
10919 << (TC !=
nullptr);
10924 if ((!AStmt && !DependFound) || ErrorFound)
10932 if (!DependFound) {
10933 if (
DSAStack->doesParentHasOrderedDirective()) {
10934 Diag(StartLoc, diag::err_omp_several_directives_in_region) <<
"ordered";
10936 diag::note_omp_previous_directive)
10940 DSAStack->setParentHasOrderedDirective(StartLoc);
10944 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10955 class OpenMPAtomicUpdateChecker {
10957 enum ExprAnalysisErrorCode {
10961 NotABinaryOrUnaryExpression,
10963 NotAnUnaryIncDecExpression,
10969 NotABinaryExpression,
10972 NotABinaryOperator,
10975 NotAnUpdateExpression,
10993 bool IsXLHSInRHSPart;
10998 bool IsPostfixUpdate;
11001 OpenMPAtomicUpdateChecker(
Sema &SemaRef)
11002 : SemaRef(SemaRef),
X(nullptr), E(nullptr), UpdateExpr(nullptr),
11003 IsXLHSInRHSPart(
false), Op(BO_PtrMemD), IsPostfixUpdate(
false) {}
11011 bool checkStatement(
Stmt *S,
unsigned DiagId = 0,
unsigned NoteId = 0);
11013 Expr *getX()
const {
return X; }
11015 Expr *getExpr()
const {
return E; }
11019 Expr *getUpdateExpr()
const {
return UpdateExpr; }
11022 bool isXLHSInRHSPart()
const {
return IsXLHSInRHSPart; }
11026 bool isPostfixUpdate()
const {
return IsPostfixUpdate; }
11029 bool checkBinaryOperation(
BinaryOperator *AtomicBinOp,
unsigned DiagId = 0,
11030 unsigned NoteId = 0);
11033 bool OpenMPAtomicUpdateChecker::checkBinaryOperation(
11034 BinaryOperator *AtomicBinOp,
unsigned DiagId,
unsigned NoteId) {
11035 ExprAnalysisErrorCode ErrorFound = NoError;
11041 if (AtomicBinOp->
getOpcode() == BO_Assign) {
11043 if (
const auto *AtomicInnerBinOp = dyn_cast<BinaryOperator>(
11045 if (AtomicInnerBinOp->isMultiplicativeOp() ||
11046 AtomicInnerBinOp->isAdditiveOp() || AtomicInnerBinOp->isShiftOp() ||
11047 AtomicInnerBinOp->isBitwiseOp()) {
11048 Op = AtomicInnerBinOp->getOpcode();
11049 OpLoc = AtomicInnerBinOp->getOperatorLoc();
11050 Expr *LHS = AtomicInnerBinOp->getLHS();
11051 Expr *RHS = AtomicInnerBinOp->getRHS();
11052 llvm::FoldingSetNodeID XId, LHSId, RHSId;
11059 if (XId == LHSId) {
11061 IsXLHSInRHSPart =
true;
11062 }
else if (XId == RHSId) {
11064 IsXLHSInRHSPart =
false;
11066 ErrorLoc = AtomicInnerBinOp->getExprLoc();
11067 ErrorRange = AtomicInnerBinOp->getSourceRange();
11068 NoteLoc =
X->getExprLoc();
11069 NoteRange =
X->getSourceRange();
11070 ErrorFound = NotAnUpdateExpression;
11073 ErrorLoc = AtomicInnerBinOp->getExprLoc();
11074 ErrorRange = AtomicInnerBinOp->getSourceRange();
11075 NoteLoc = AtomicInnerBinOp->getOperatorLoc();
11077 ErrorFound = NotABinaryOperator;
11082 ErrorFound = NotABinaryExpression;
11089 ErrorFound = NotAnAssignmentOp;
11091 if (ErrorFound != NoError && DiagId != 0 && NoteId != 0) {
11092 SemaRef.
Diag(ErrorLoc, DiagId) << ErrorRange;
11093 SemaRef.
Diag(NoteLoc, NoteId) << ErrorFound << NoteRange;
11097 E =
X = UpdateExpr =
nullptr;
11098 return ErrorFound != NoError;
11101 bool OpenMPAtomicUpdateChecker::checkStatement(
Stmt *S,
unsigned DiagId,
11103 ExprAnalysisErrorCode ErrorFound = NoError;
11114 if (
auto *AtomicBody = dyn_cast<Expr>(S)) {
11115 AtomicBody = AtomicBody->IgnoreParenImpCasts();
11116 if (AtomicBody->getType()->isScalarType() ||
11117 AtomicBody->isInstantiationDependent()) {
11118 if (
const auto *AtomicCompAssignOp = dyn_cast<CompoundAssignOperator>(
11119 AtomicBody->IgnoreParenImpCasts())) {
11122 AtomicCompAssignOp->getOpcode());
11123 OpLoc = AtomicCompAssignOp->getOperatorLoc();
11124 E = AtomicCompAssignOp->getRHS();
11125 X = AtomicCompAssignOp->getLHS()->IgnoreParens();
11126 IsXLHSInRHSPart =
true;
11127 }
else if (
auto *AtomicBinOp = dyn_cast<BinaryOperator>(
11128 AtomicBody->IgnoreParenImpCasts())) {
11130 if (checkBinaryOperation(AtomicBinOp, DiagId, NoteId))
11132 }
else if (
const auto *AtomicUnaryOp = dyn_cast<UnaryOperator>(
11133 AtomicBody->IgnoreParenImpCasts())) {
11135 if (AtomicUnaryOp->isIncrementDecrementOp()) {
11136 IsPostfixUpdate = AtomicUnaryOp->isPostfix();
11137 Op = AtomicUnaryOp->isIncrementOp() ? BO_Add : BO_Sub;
11138 OpLoc = AtomicUnaryOp->getOperatorLoc();
11139 X = AtomicUnaryOp->getSubExpr()->IgnoreParens();
11141 IsXLHSInRHSPart =
true;
11143 ErrorFound = NotAnUnaryIncDecExpression;
11144 ErrorLoc = AtomicUnaryOp->getExprLoc();
11145 ErrorRange = AtomicUnaryOp->getSourceRange();
11146 NoteLoc = AtomicUnaryOp->getOperatorLoc();
11149 }
else if (!AtomicBody->isInstantiationDependent()) {
11150 ErrorFound = NotABinaryOrUnaryExpression;
11151 NoteLoc = ErrorLoc = AtomicBody->getExprLoc();
11152 NoteRange = ErrorRange = AtomicBody->getSourceRange();
11155 ErrorFound = NotAScalarType;
11156 NoteLoc = ErrorLoc = AtomicBody->getBeginLoc();
11157 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
11160 ErrorFound = NotAnExpression;
11161 NoteLoc = ErrorLoc = S->getBeginLoc();
11162 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
11164 if (ErrorFound != NoError && DiagId != 0 && NoteId != 0) {
11165 SemaRef.
Diag(ErrorLoc, DiagId) << ErrorRange;
11166 SemaRef.
Diag(NoteLoc, NoteId) << ErrorFound << NoteRange;
11170 E =
X = UpdateExpr =
nullptr;
11171 if (ErrorFound == NoError && E &&
X) {
11181 IsXLHSInRHSPart ? OVEExpr : OVEX);
11188 UpdateExpr =
Update.get();
11190 return ErrorFound != NoError;
11194 llvm::FoldingSetNodeID getNodeId(
ASTContext &Context,
const Expr *S) {
11195 llvm::FoldingSetNodeID
Id;
11196 S->IgnoreParenImpCasts()->Profile(
Id, Context,
true);
11201 bool checkIfTwoExprsAreSame(
ASTContext &Context,
const Expr *LHS,
11203 return getNodeId(Context, LHS) == getNodeId(Context, RHS);
11206 class OpenMPAtomicCompareChecker {
11253 struct ErrorInfoTy {
11261 OpenMPAtomicCompareChecker(
Sema &S) : ContextRef(S.getASTContext()) {}
11264 bool checkStmt(
Stmt *S, ErrorInfoTy &ErrorInfo);
11266 Expr *getX()
const {
return X; }
11267 Expr *getE()
const {
return E; }
11268 Expr *getD()
const {
return D; }
11269 Expr *getCond()
const {
return C; }
11270 bool isXBinopExpr()
const {
return IsXBinopExpr; }
11289 bool IsXBinopExpr =
true;
11292 bool checkCondUpdateStmt(
IfStmt *S, ErrorInfoTy &ErrorInfo);
11295 bool checkCondExprStmt(
Stmt *S, ErrorInfoTy &ErrorInfo);
11298 bool checkType(ErrorInfoTy &ErrorInfo)
const;
11300 static bool CheckValue(
const Expr *E, ErrorInfoTy &ErrorInfo,
11301 bool ShouldBeLValue) {
11302 if (ShouldBeLValue && !E->
isLValue()) {
11303 ErrorInfo.Error = ErrorTy::XNotLValue;
11304 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11305 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11312 ErrorInfo.Error = ErrorTy::NotScalar;
11313 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11314 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11319 ErrorInfo.Error = ErrorTy::NotInteger;
11320 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11321 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11330 bool OpenMPAtomicCompareChecker::checkCondUpdateStmt(
IfStmt *S,
11331 ErrorInfoTy &ErrorInfo) {
11332 auto *Then = S->getThen();
11333 if (
auto *CS = dyn_cast<CompoundStmt>(Then)) {
11334 if (CS->body_empty()) {
11335 ErrorInfo.Error = ErrorTy::NoStmt;
11336 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11337 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11340 if (CS->size() > 1) {
11341 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11342 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11343 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11346 Then = CS->body_front();
11349 auto *BO = dyn_cast<BinaryOperator>(Then);
11351 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11352 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Then->getBeginLoc();
11353 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Then->getSourceRange();
11356 if (BO->getOpcode() != BO_Assign) {
11357 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11358 ErrorInfo.ErrorLoc = BO->getExprLoc();
11359 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11360 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11366 auto *Cond = dyn_cast<BinaryOperator>(S->getCond());
11368 ErrorInfo.Error = ErrorTy::NotABinaryOp;
11369 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getCond()->getExprLoc();
11370 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getCond()->getSourceRange();
11374 switch (Cond->getOpcode()) {
11378 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS())) {
11380 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11383 ErrorInfo.Error = ErrorTy::InvalidComparison;
11384 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11385 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11393 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS()) &&
11394 checkIfTwoExprsAreSame(ContextRef, E, Cond->getRHS())) {
11396 }
else if (checkIfTwoExprsAreSame(ContextRef, E, Cond->getLHS()) &&
11397 checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11399 IsXBinopExpr =
false;
11401 ErrorInfo.Error = ErrorTy::InvalidComparison;
11402 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11403 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11409 ErrorInfo.Error = ErrorTy::InvalidBinaryOp;
11410 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11411 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11415 if (S->getElse()) {
11416 ErrorInfo.Error = ErrorTy::UnexpectedElse;
11417 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getElse()->getBeginLoc();
11418 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getElse()->getSourceRange();
11425 bool OpenMPAtomicCompareChecker::checkCondExprStmt(
Stmt *S,
11426 ErrorInfoTy &ErrorInfo) {
11427 auto *BO = dyn_cast<BinaryOperator>(S);
11429 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11430 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getBeginLoc();
11431 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11434 if (BO->getOpcode() != BO_Assign) {
11435 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11436 ErrorInfo.ErrorLoc = BO->getExprLoc();
11437 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11438 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11444 auto *CO = dyn_cast<ConditionalOperator>(BO->getRHS()->IgnoreParenImpCasts());
11446 ErrorInfo.Error = ErrorTy::NotCondOp;
11447 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = BO->getRHS()->getExprLoc();
11448 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getRHS()->getSourceRange();
11452 if (!checkIfTwoExprsAreSame(ContextRef,
X, CO->getFalseExpr())) {
11453 ErrorInfo.Error = ErrorTy::WrongFalseExpr;
11454 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getFalseExpr()->getExprLoc();
11455 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
11456 CO->getFalseExpr()->getSourceRange();
11460 auto *Cond = dyn_cast<BinaryOperator>(CO->getCond());
11462 ErrorInfo.Error = ErrorTy::NotABinaryOp;
11463 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getCond()->getExprLoc();
11464 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
11465 CO->getCond()->getSourceRange();
11469 switch (Cond->getOpcode()) {
11473 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS())) {
11475 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11478 ErrorInfo.Error = ErrorTy::InvalidComparison;
11479 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11480 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11488 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS()) &&
11489 checkIfTwoExprsAreSame(ContextRef, E, Cond->getRHS())) {
11491 }
else if (checkIfTwoExprsAreSame(ContextRef, E, Cond->getLHS()) &&
11492 checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11494 IsXBinopExpr =
false;
11496 ErrorInfo.Error = ErrorTy::InvalidComparison;
11497 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11498 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11504 ErrorInfo.Error = ErrorTy::InvalidBinaryOp;
11505 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11506 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11513 bool OpenMPAtomicCompareChecker::checkType(ErrorInfoTy &ErrorInfo)
const {
11515 assert(
X && E &&
"X and E cannot be nullptr");
11517 if (!CheckValue(
X, ErrorInfo,
true))
11520 if (!CheckValue(E, ErrorInfo,
false))
11523 if (D && !CheckValue(D, ErrorInfo,
false))
11529 bool OpenMPAtomicCompareChecker::checkStmt(
11530 Stmt *S, OpenMPAtomicCompareChecker::ErrorInfoTy &ErrorInfo) {
11531 auto *CS = dyn_cast<CompoundStmt>(S);
11533 if (CS->body_empty()) {
11534 ErrorInfo.Error = ErrorTy::NoStmt;
11535 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11536 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11540 if (CS->size() != 1) {
11541 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11542 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11543 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11546 S = CS->body_front();
11551 if (
auto *IS = dyn_cast<IfStmt>(S)) {
11557 Res = checkCondUpdateStmt(IS, ErrorInfo);
11563 Res = checkCondExprStmt(S, ErrorInfo);
11569 return checkType(ErrorInfo);
11572 class OpenMPAtomicCompareCaptureChecker final
11573 :
public OpenMPAtomicCompareChecker {
11575 OpenMPAtomicCompareCaptureChecker(
Sema &S) : OpenMPAtomicCompareChecker(S) {}
11577 Expr *getV()
const {
return V; }
11578 Expr *getR()
const {
return R; }
11579 bool isFailOnly()
const {
return IsFailOnly; }
11582 bool checkStmt(
Stmt *S, ErrorInfoTy &ErrorInfo);
11585 bool checkType(ErrorInfoTy &ErrorInfo);
11597 bool checkForm3(
IfStmt *S, ErrorInfoTy &ErrorInfo);
11601 bool checkForm45(
Stmt *S, ErrorInfoTy &ErrorInfo);
11608 bool IsFailOnly =
false;
11611 bool OpenMPAtomicCompareCaptureChecker::checkType(ErrorInfoTy &ErrorInfo) {
11612 if (!OpenMPAtomicCompareChecker::checkType(ErrorInfo))
11615 if (
V && !CheckValue(
V, ErrorInfo,
true))
11618 if (R && !CheckValue(R, ErrorInfo,
true))
11624 bool OpenMPAtomicCompareCaptureChecker::checkForm3(
IfStmt *S,
11625 ErrorInfoTy &ErrorInfo) {
11628 auto *Then = S->getThen();
11629 if (
auto *CS = dyn_cast<CompoundStmt>(Then)) {
11630 if (CS->body_empty()) {
11631 ErrorInfo.Error = ErrorTy::NoStmt;
11632 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11633 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11636 if (CS->size() > 1) {
11637 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11638 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11639 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11642 Then = CS->body_front();
11645 auto *BO = dyn_cast<BinaryOperator>(Then);
11647 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11648 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Then->getBeginLoc();
11649 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Then->getSourceRange();
11652 if (BO->getOpcode() != BO_Assign) {
11653 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11654 ErrorInfo.ErrorLoc = BO->getExprLoc();
11655 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11656 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11663 auto *Cond = dyn_cast<BinaryOperator>(S->getCond());
11665 ErrorInfo.Error = ErrorTy::NotABinaryOp;
11666 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getCond()->getExprLoc();
11667 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getCond()->getSourceRange();
11670 if (Cond->getOpcode() != BO_EQ) {
11671 ErrorInfo.Error = ErrorTy::NotEQ;
11672 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11673 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11677 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS())) {
11678 E = Cond->getRHS();
11679 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11680 E = Cond->getLHS();
11682 ErrorInfo.Error = ErrorTy::InvalidComparison;
11683 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11684 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11690 if (!S->getElse()) {
11691 ErrorInfo.Error = ErrorTy::NoElse;
11692 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getBeginLoc();
11693 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11697 auto *Else = S->getElse();
11698 if (
auto *CS = dyn_cast<CompoundStmt>(Else)) {
11699 if (CS->body_empty()) {
11700 ErrorInfo.Error = ErrorTy::NoStmt;
11701 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11702 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11705 if (CS->size() > 1) {
11706 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11707 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11708 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11711 Else = CS->body_front();
11714 auto *ElseBO = dyn_cast<BinaryOperator>(Else);
11716 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11717 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Else->getBeginLoc();
11718 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Else->getSourceRange();
11721 if (ElseBO->getOpcode() != BO_Assign) {
11722 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11723 ErrorInfo.ErrorLoc = ElseBO->getExprLoc();
11724 ErrorInfo.NoteLoc = ElseBO->getOperatorLoc();
11725 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseBO->getSourceRange();
11729 if (!checkIfTwoExprsAreSame(ContextRef,
X, ElseBO->getRHS())) {
11730 ErrorInfo.Error = ErrorTy::InvalidAssignment;
11731 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseBO->getRHS()->getExprLoc();
11732 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
11733 ElseBO->getRHS()->getSourceRange();
11737 V = ElseBO->getLHS();
11739 return checkType(ErrorInfo);
// Validates OpenMP 5.1 'atomic compare capture' forms 4/5:
//   { r = x == e; if (r) { x = d; } [ else { v = x; } ] }
// S is a CompoundStmt of exactly two substatements (asserted below): an
// assignment S1 capturing the comparison result, and an IfStmt S2 whose
// condition must be the variable assigned in S1. On any structural
// violation, ErrorInfo is filled for diagnostics and the function reports
// failure; on success X, D (and V for the else-branch form) are recorded
// and type checking is delegated to checkType().
// NOTE(review): this listing appears to have lines elided by extraction
// (several guard/return statements and closing braces are absent); the
// comments below describe the visible flow only.
11742 bool OpenMPAtomicCompareCaptureChecker::checkForm45(
Stmt *S,
11743 ErrorInfoTy &ErrorInfo) {
// The caller guarantees the overall shape: two statements, the first an
// assignment.
11746 auto *CS = cast<CompoundStmt>(S);
11747 assert(CS->size() == 2 &&
"CompoundStmt size is not expected");
11748 auto *S1 = cast<BinaryOperator>(CS->body_front());
11749 auto *S2 = cast<IfStmt>(CS->body_back());
11750 assert(S1->getOpcode() == BO_Assign &&
"unexpected binary operator");
// The if-condition must be exactly the LHS of the capturing assignment
// (i.e. the boolean result variable `r`).
11752 if (!checkIfTwoExprsAreSame(ContextRef, S1->getLHS(), S2->getCond())) {
11753 ErrorInfo.Error = ErrorTy::InvalidCondition;
11754 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S2->getCond()->getExprLoc();
11755 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S1->getLHS()->getSourceRange();
// The 'then' branch may be a CompoundStmt; if so it must contain exactly
// one statement, which is unwrapped before further checks.
11761 auto *Then = S2->getThen();
11762 if (
auto *ThenCS = dyn_cast<CompoundStmt>(Then)) {
11763 if (ThenCS->body_empty()) {
11764 ErrorInfo.Error = ErrorTy::NoStmt;
11765 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ThenCS->getBeginLoc();
11766 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenCS->getSourceRange();
11769 if (ThenCS->size() > 1) {
11770 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11771 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ThenCS->getBeginLoc();
11772 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenCS->getSourceRange();
11775 Then = ThenCS->body_front();
// The single 'then' statement must be the conditional update `x = d`.
11778 auto *ThenBO = dyn_cast<BinaryOperator>(Then);
11780 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11781 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S2->getBeginLoc();
11782 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S2->getSourceRange();
11785 if (ThenBO->getOpcode() != BO_Assign) {
11786 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11787 ErrorInfo.ErrorLoc = ThenBO->getExprLoc();
11788 ErrorInfo.NoteLoc = ThenBO->getOperatorLoc();
11789 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenBO->getSourceRange();
// Record the atomic variable and the desired value from `x = d`.
11793 X = ThenBO->getLHS();
11794 D = ThenBO->getRHS();
// The RHS of the capturing assignment S1 must be an equality comparison
// `x == e` (either operand order); x is identified by matching against
// the X recorded above.
11796 auto *BO = cast<BinaryOperator>(S1->getRHS()->IgnoreImpCasts());
11797 if (BO->getOpcode() != BO_EQ) {
11798 ErrorInfo.Error = ErrorTy::NotEQ;
11799 ErrorInfo.ErrorLoc = BO->getExprLoc();
11800 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11801 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11807 if (checkIfTwoExprsAreSame(ContextRef,
X, BO->getLHS())) {
11809 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, BO->getRHS())) {
11812 ErrorInfo.Error = ErrorTy::InvalidComparison;
11813 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = BO->getExprLoc();
11814 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
// Optional 'else' branch (form 5): must be a single assignment `v = x`
// capturing the old value when the comparison failed. Same
// unwrap-single-statement handling as the 'then' branch.
11818 if (S2->getElse()) {
11821 auto *Else = S2->getElse();
11822 if (
auto *ElseCS = dyn_cast<CompoundStmt>(Else)) {
11823 if (ElseCS->body_empty()) {
11824 ErrorInfo.Error = ErrorTy::NoStmt;
11825 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseCS->getBeginLoc();
11826 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseCS->getSourceRange();
11829 if (ElseCS->size() > 1) {
11830 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11831 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseCS->getBeginLoc();
11832 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseCS->getSourceRange();
11835 Else = ElseCS->body_front();
11838 auto *ElseBO = dyn_cast<BinaryOperator>(Else);
11840 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11841 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Else->getBeginLoc();
11842 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Else->getSourceRange();
11845 if (ElseBO->getOpcode() != BO_Assign) {
11846 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11847 ErrorInfo.ErrorLoc = ElseBO->getExprLoc();
11848 ErrorInfo.NoteLoc = ElseBO->getOperatorLoc();
11849 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseBO->getSourceRange();
// The else assignment must read exactly X (`v = x`); otherwise the
// capture is of the wrong expression.
11852 if (!checkIfTwoExprsAreSame(ContextRef,
X, ElseBO->getRHS())) {
11853 ErrorInfo.Error = ErrorTy::InvalidAssignment;
11854 ErrorInfo.ErrorLoc = ElseBO->getRHS()->getExprLoc();
11855 ErrorInfo.NoteLoc =
X->getExprLoc();
11856 ErrorInfo.ErrorRange = ElseBO->getRHS()->getSourceRange();
11857 ErrorInfo.NoteRange =
X->getSourceRange();
11861 V = ElseBO->getLHS();
// Structure is valid; finish with the scalar/type requirements.
11864 return checkType(ErrorInfo);
// Top-level dispatcher for the associated statement of an
// 'omp atomic compare capture' construct. Recognizes which allowed form
// S matches and delegates to the form-specific checker:
//   - a bare IfStmt                         -> checkForm3
//   - a CompoundStmt with one IfStmt        -> checkForm3 (unwrapped)
//   - a CompoundStmt with two statements    -> either forms 4/5
//     (assignment whose RHS is a comparison, followed by an IfStmt), or a
//     {update-stmt; capture-stmt} pair checked via the lambdas below.
// Anything else fills ErrorInfo and fails.
// NOTE(review): extraction appears to have dropped some guard/return
// lines here; comments describe the visible flow only.
11867 bool OpenMPAtomicCompareCaptureChecker::checkStmt(
Stmt *S,
11868 ErrorInfoTy &ErrorInfo) {
11870 if (
auto *IS = dyn_cast<IfStmt>(S))
11871 return checkForm3(IS, ErrorInfo);
// Not a bare if-statement: it must be a compound statement.
11873 auto *CS = dyn_cast<CompoundStmt>(S);
11875 ErrorInfo.Error = ErrorTy::NotCompoundStmt;
11876 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getBeginLoc();
11877 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11880 if (CS->body_empty()) {
11881 ErrorInfo.Error = ErrorTy::NoStmt;
11882 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11883 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
// A single-statement compound must wrap an IfStmt (form 3).
11888 if (CS->size() == 1) {
11889 auto *IS = dyn_cast<IfStmt>(CS->body_front());
11891 ErrorInfo.Error = ErrorTy::NotIfStmt;
11892 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->body_front()->
getBeginLoc();
11893 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
11898 return checkForm3(IS, ErrorInfo);
11899 }
else if (CS->size() == 2) {
// Two statements: decide which is the conditional-update statement and
// which is the capture statement.
11900 auto *S1 = CS->body_front();
11901 auto *S2 = CS->body_back();
11903 Stmt *UpdateStmt =
nullptr;
11904 Stmt *CondUpdateStmt =
nullptr;
11906 if (
auto *BO = dyn_cast<BinaryOperator>(S1)) {
11909 CondUpdateStmt = S2;
// If the first statement assigns the result of a comparison and the
// second is an if-statement, this is form 4/5; check the whole compound.
11911 if (dyn_cast<BinaryOperator>(BO->getRHS()->IgnoreImpCasts()) &&
11912 dyn_cast<IfStmt>(S2))
11913 return checkForm45(CS, ErrorInfo);
11917 CondUpdateStmt = S1;
// Validates the conditional-update half: it must be an IfStmt that
// passes checkCondUpdateStmt.
11920 auto CheckCondUpdateStmt = [
this, &ErrorInfo](
Stmt *CUS) {
11921 auto *IS = dyn_cast<IfStmt>(CUS);
11923 ErrorInfo.Error = ErrorTy::NotIfStmt;
11924 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CUS->getBeginLoc();
11925 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CUS->getSourceRange();
11929 if (!checkCondUpdateStmt(IS, ErrorInfo))
// Validates the capture half: an assignment `v = x` whose RHS matches
// the X already identified by the conditional-update checker.
11936 auto CheckUpdateStmt = [
this, &ErrorInfo](
Stmt *US) {
11937 auto *BO = dyn_cast<BinaryOperator>(US);
11939 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11940 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = US->getBeginLoc();
11941 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = US->getSourceRange();
11944 if (BO->getOpcode() != BO_Assign) {
11945 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11946 ErrorInfo.ErrorLoc = BO->getExprLoc();
11947 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11948 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11951 if (!checkIfTwoExprsAreSame(ContextRef, this->X, BO->getRHS())) {
11952 ErrorInfo.Error = ErrorTy::InvalidAssignment;
11953 ErrorInfo.ErrorLoc = BO->getRHS()->getExprLoc();
11955 ErrorInfo.ErrorRange = BO->getRHS()->getSourceRange();
11960 this->
V = BO->getLHS();
// The cond-update must be checked first so that X is available when the
// capture statement is validated.
11965 if (!CheckCondUpdateStmt(CondUpdateStmt))
11967 if (!CheckUpdateStmt(UpdateStmt))
// More than two statements is never a valid compare-capture form.
11970 ErrorInfo.Error = ErrorTy::MoreThanTwoStmts;
11971 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11972 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11976 return checkType(ErrorInfo);
11985 DSAStack->addAtomicDirectiveLoc(StartLoc);
11998 bool MutexClauseEncountered =
false;
11999 llvm::SmallSet<OpenMPClauseKind, 2> EncounteredAtomicKinds;
12001 switch (C->getClauseKind()) {
12005 MutexClauseEncountered =
true;
12008 case OMPC_compare: {
12009 if (AtomicKind != OMPC_unknown && MutexClauseEncountered) {
12010 Diag(C->getBeginLoc(), diag::err_omp_atomic_several_clauses)
12011 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
12012 Diag(AtomicKindLoc, diag::note_omp_previous_mem_order_clause)
12013 << getOpenMPClauseName(AtomicKind);
12015 AtomicKind = C->getClauseKind();
12016 AtomicKindLoc = C->getBeginLoc();
12017 if (!EncounteredAtomicKinds.insert(C->getClauseKind()).second) {
12018 Diag(C->getBeginLoc(), diag::err_omp_atomic_several_clauses)
12019 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
12020 Diag(AtomicKindLoc, diag::note_omp_previous_mem_order_clause)
12021 << getOpenMPClauseName(AtomicKind);
12030 case OMPC_relaxed: {
12031 if (MemOrderKind != OMPC_unknown) {
12032 Diag(C->getBeginLoc(), diag::err_omp_several_mem_order_clauses)
12033 << getOpenMPDirectiveName(OMPD_atomic) << 0
12034 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
12035 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
12036 << getOpenMPClauseName(MemOrderKind);
12038 MemOrderKind = C->getClauseKind();
12039 MemOrderLoc = C->getBeginLoc();
12047 llvm_unreachable(
"unknown clause is encountered");
12050 bool IsCompareCapture =
false;
12051 if (EncounteredAtomicKinds.contains(OMPC_compare) &&
12052 EncounteredAtomicKinds.contains(OMPC_capture)) {
12053 IsCompareCapture =
true;
12054 AtomicKind = OMPC_compare;
12063 if ((AtomicKind == OMPC_read &&
12064 (MemOrderKind == OMPC_acq_rel || MemOrderKind == OMPC_release)) ||
12065 ((AtomicKind == OMPC_write || AtomicKind == OMPC_update ||
12066 AtomicKind == OMPC_unknown) &&
12067 (MemOrderKind == OMPC_acq_rel || MemOrderKind == OMPC_acquire))) {
12069 if (AtomicKind == OMPC_unknown)
12071 Diag(Loc, diag::err_omp_atomic_incompatible_mem_order_clause)
12072 << getOpenMPClauseName(AtomicKind)
12073 << (AtomicKind == OMPC_unknown ? 1 : 0)
12074 << getOpenMPClauseName(MemOrderKind);
12075 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
12076 << getOpenMPClauseName(MemOrderKind);
12079 Stmt *Body = AStmt;
12080 if (
auto *EWC = dyn_cast<ExprWithCleanups>(Body))
12081 Body = EWC->getSubExpr();
12086 Expr *UE =
nullptr;
12088 Expr *CE =
nullptr;
12089 bool IsXLHSInRHSPart =
false;
12090 bool IsPostfixUpdate =
false;
12113 if (AtomicKind == OMPC_read) {
12120 } ErrorFound = NoError;
12125 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12126 const auto *AtomicBinOp =
12127 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12128 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12131 if ((
X->isInstantiationDependent() ||
X->getType()->isScalarType()) &&
12132 (
V->isInstantiationDependent() ||
V->getType()->isScalarType())) {
12133 if (!
X->isLValue() || !
V->isLValue()) {
12134 const Expr *NotLValueExpr =
X->isLValue() ?
V :
X;
12135 ErrorFound = NotAnLValue;
12141 }
else if (!
X->isInstantiationDependent() ||
12142 !
V->isInstantiationDependent()) {
12143 const Expr *NotScalarExpr =
12144 (
X->isInstantiationDependent() ||
X->getType()->isScalarType())
12147 ErrorFound = NotAScalarType;
12153 }
else if (!AtomicBody->isInstantiationDependent()) {
12154 ErrorFound = NotAnAssignmentOp;
12155 ErrorLoc = AtomicBody->getExprLoc();
12156 ErrorRange = AtomicBody->getSourceRange();
12158 : AtomicBody->getExprLoc();
12160 : AtomicBody->getSourceRange();
12163 ErrorFound = NotAnExpression;
12165 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
12167 if (ErrorFound != NoError) {
12168 Diag(ErrorLoc, diag::err_omp_atomic_read_not_expression_statement)
12170 Diag(NoteLoc, diag::note_omp_atomic_read_write)
12171 << ErrorFound << NoteRange;
12176 }
else if (AtomicKind == OMPC_write) {
12183 } ErrorFound = NoError;
12188 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12189 const auto *AtomicBinOp =
12190 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12191 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12193 E = AtomicBinOp->
getRHS();
12194 if ((
X->isInstantiationDependent() ||
X->getType()->isScalarType()) &&
12196 if (!
X->isLValue()) {
12197 ErrorFound = NotAnLValue;
12200 NoteLoc =
X->getExprLoc();
12201 NoteRange =
X->getSourceRange();
12203 }
else if (!
X->isInstantiationDependent() ||
12205 const Expr *NotScalarExpr =
12206 (
X->isInstantiationDependent() ||
X->getType()->isScalarType())
12209 ErrorFound = NotAScalarType;
12215 }
else if (!AtomicBody->isInstantiationDependent()) {
12216 ErrorFound = NotAnAssignmentOp;
12217 ErrorLoc = AtomicBody->getExprLoc();
12218 ErrorRange = AtomicBody->getSourceRange();
12220 : AtomicBody->getExprLoc();
12222 : AtomicBody->getSourceRange();
12225 ErrorFound = NotAnExpression;
12227 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
12229 if (ErrorFound != NoError) {
12230 Diag(ErrorLoc, diag::err_omp_atomic_write_not_expression_statement)
12232 Diag(NoteLoc, diag::note_omp_atomic_read_write)
12233 << ErrorFound << NoteRange;
12238 }
else if (AtomicKind == OMPC_update || AtomicKind == OMPC_unknown) {
12247 OpenMPAtomicUpdateChecker Checker(*
this);
12248 if (Checker.checkStatement(
12250 (AtomicKind == OMPC_update)
12251 ? diag::err_omp_atomic_update_not_expression_statement
12252 : diag::err_omp_atomic_not_expression_statement,
12253 diag::note_omp_atomic_update))
12256 E = Checker.getExpr();
12257 X = Checker.getX();
12258 UE = Checker.getUpdateExpr();
12259 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12261 }
else if (AtomicKind == OMPC_capture) {
12264 NotACompoundStatement,
12265 NotTwoSubstatements,
12266 NotASpecificExpression,
12268 } ErrorFound = NoError;
12271 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12280 const auto *AtomicBinOp =
12281 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12282 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12285 OpenMPAtomicUpdateChecker Checker(*
this);
12286 if (Checker.checkStatement(
12287 Body, diag::err_omp_atomic_capture_not_expression_statement,
12288 diag::note_omp_atomic_update))
12290 E = Checker.getExpr();
12291 X = Checker.getX();
12292 UE = Checker.getUpdateExpr();
12293 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12294 IsPostfixUpdate = Checker.isPostfixUpdate();
12295 }
else if (!AtomicBody->isInstantiationDependent()) {
12296 ErrorLoc = AtomicBody->getExprLoc();
12297 ErrorRange = AtomicBody->getSourceRange();
12299 : AtomicBody->getExprLoc();
12301 : AtomicBody->getSourceRange();
12302 ErrorFound = NotAnAssignmentOp;
12304 if (ErrorFound != NoError) {
12305 Diag(ErrorLoc, diag::err_omp_atomic_capture_not_expression_statement)
12307 Diag(NoteLoc, diag::note_omp_atomic_capture) << ErrorFound << NoteRange;
12311 UE =
V = E =
X =
nullptr;
12329 if (
auto *CS = dyn_cast<CompoundStmt>(Body)) {
12331 if (CS->size() == 2) {
12333 Stmt *Second = CS->body_back();
12334 if (
auto *EWC = dyn_cast<ExprWithCleanups>(
First))
12335 First = EWC->getSubExpr()->IgnoreParenImpCasts();
12336 if (
auto *EWC = dyn_cast<ExprWithCleanups>(Second))
12337 Second = EWC->getSubExpr()->IgnoreParenImpCasts();
12339 OpenMPAtomicUpdateChecker Checker(*
this);
12340 bool IsUpdateExprFound = !Checker.checkStatement(Second);
12342 if (IsUpdateExprFound) {
12343 BinOp = dyn_cast<BinaryOperator>(
First);
12344 IsUpdateExprFound = BinOp && BinOp->
getOpcode() == BO_Assign;
12356 llvm::FoldingSetNodeID XId, PossibleXId;
12357 Checker.getX()->Profile(XId,
Context,
true);
12359 IsUpdateExprFound = XId == PossibleXId;
12360 if (IsUpdateExprFound) {
12362 X = Checker.getX();
12363 E = Checker.getExpr();
12364 UE = Checker.getUpdateExpr();
12365 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12366 IsPostfixUpdate =
true;
12369 if (!IsUpdateExprFound) {
12370 IsUpdateExprFound = !Checker.checkStatement(
First);
12372 if (IsUpdateExprFound) {
12373 BinOp = dyn_cast<BinaryOperator>(Second);
12374 IsUpdateExprFound = BinOp && BinOp->
getOpcode() == BO_Assign;
12386 llvm::FoldingSetNodeID XId, PossibleXId;
12387 Checker.getX()->Profile(XId,
Context,
true);
12389 IsUpdateExprFound = XId == PossibleXId;
12390 if (IsUpdateExprFound) {
12392 X = Checker.getX();
12393 E = Checker.getExpr();
12394 UE = Checker.getUpdateExpr();
12395 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12396 IsPostfixUpdate =
false;
12400 if (!IsUpdateExprFound) {
12402 auto *FirstExpr = dyn_cast<Expr>(
First);
12403 auto *SecondExpr = dyn_cast<Expr>(Second);
12404 if (!FirstExpr || !SecondExpr ||
12405 !(FirstExpr->isInstantiationDependent() ||
12406 SecondExpr->isInstantiationDependent())) {
12407 auto *FirstBinOp = dyn_cast<BinaryOperator>(
First);
12408 if (!FirstBinOp || FirstBinOp->getOpcode() != BO_Assign) {
12409 ErrorFound = NotAnAssignmentOp;
12410 NoteLoc = ErrorLoc = FirstBinOp ? FirstBinOp->getOperatorLoc()
12411 :
First->getBeginLoc();
12412 NoteRange = ErrorRange = FirstBinOp
12413 ? FirstBinOp->getSourceRange()
12416 auto *SecondBinOp = dyn_cast<BinaryOperator>(Second);
12417 if (!SecondBinOp || SecondBinOp->getOpcode() != BO_Assign) {
12418 ErrorFound = NotAnAssignmentOp;
12419 NoteLoc = ErrorLoc = SecondBinOp
12420 ? SecondBinOp->getOperatorLoc()
12422 NoteRange = ErrorRange =
12423 SecondBinOp ? SecondBinOp->getSourceRange()
12426 Expr *PossibleXRHSInFirst =
12428 Expr *PossibleXLHSInSecond =
12430 llvm::FoldingSetNodeID X1Id, X2Id;
12435 IsUpdateExprFound = X1Id == X2Id;
12436 if (IsUpdateExprFound) {
12437 V = FirstBinOp->getLHS();
12438 X = SecondBinOp->getLHS();
12439 E = SecondBinOp->getRHS();
12441 IsXLHSInRHSPart =
false;
12442 IsPostfixUpdate =
true;
12444 ErrorFound = NotASpecificExpression;
12445 ErrorLoc = FirstBinOp->getExprLoc();
12446 ErrorRange = FirstBinOp->getSourceRange();
12447 NoteLoc = SecondBinOp->getLHS()->getExprLoc();
12448 NoteRange = SecondBinOp->getRHS()->getSourceRange();
12456 NoteRange = ErrorRange =
12458 ErrorFound = NotTwoSubstatements;
12462 NoteRange = ErrorRange =
12464 ErrorFound = NotACompoundStatement;
12467 if (ErrorFound != NoError) {
12468 Diag(ErrorLoc, diag::err_omp_atomic_capture_not_compound_statement)
12470 Diag(NoteLoc, diag::note_omp_atomic_capture) << ErrorFound << NoteRange;
12474 UE =
V = E =
X =
nullptr;
12475 }
else if (AtomicKind == OMPC_compare) {
12476 if (IsCompareCapture) {
12477 OpenMPAtomicCompareCaptureChecker::ErrorInfoTy ErrorInfo;
12478 OpenMPAtomicCompareCaptureChecker Checker(*
this);
12479 if (!Checker.checkStmt(Body, ErrorInfo)) {
12480 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_compare_capture)
12481 << ErrorInfo.ErrorRange;
12482 Diag(ErrorInfo.NoteLoc, diag::note_omp_atomic_compare)
12483 << ErrorInfo.Error << ErrorInfo.NoteRange;
12489 OpenMPAtomicCompareChecker::ErrorInfoTy ErrorInfo;
12490 OpenMPAtomicCompareChecker Checker(*
this);
12491 if (!Checker.checkStmt(Body, ErrorInfo)) {
12492 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_compare)
12493 << ErrorInfo.ErrorRange;
12494 Diag(ErrorInfo.NoteLoc, diag::note_omp_atomic_compare)
12495 << ErrorInfo.Error << ErrorInfo.NoteRange;
12498 X = Checker.getX();
12499 E = Checker.getE();
12500 D = Checker.getD();
12501 CE = Checker.getCond();
12503 IsXLHSInRHSPart = Checker.isXBinopExpr();
12510 X,
V, E, UE, D, CE, IsXLHSInRHSPart,
12521 auto *CS = cast<CapturedStmt>(AStmt);
12529 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12543 if (
DSAStack->hasInnerTeamsRegion()) {
12545 bool OMPTeamsFound =
true;
12546 if (
const auto *CS = dyn_cast<CompoundStmt>(S)) {
12547 auto I = CS->body_begin();
12548 while (I != CS->body_end()) {
12549 const auto *OED = dyn_cast<OMPExecutableDirective>(*I);
12553 OMPTeamsFound =
false;
12558 assert(I != CS->body_end() &&
"Not found statement");
12561 const auto *OED = dyn_cast<OMPExecutableDirective>(S);
12564 if (!OMPTeamsFound) {
12565 Diag(StartLoc, diag::err_omp_target_contains_not_only_teams);
12567 diag::note_omp_nested_teams_construct_here);
12568 Diag(S->getBeginLoc(), diag::note_omp_nested_statement_here)
12569 << isa<OMPExecutableDirective>(S);
12586 auto *CS = cast<CapturedStmt>(AStmt);
12594 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12607 Context, StartLoc, EndLoc, Clauses, AStmt,
12617 auto *CS = cast<CapturedStmt>(AStmt);
12625 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12638 unsigned NestedLoopCount =
12641 VarsWithImplicitDSA, B);
12642 if (NestedLoopCount == 0)
12646 "omp target parallel for loop exprs were not built");
12651 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
12661 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
12668 return llvm::any_of(
12669 Clauses, [K](
const OMPClause *C) {
return C->getClauseKind() == K; });
12672 template <
typename... Params>
12674 const Params... ClauseTypes) {
12681 if (
auto *TC = dyn_cast<OMPToClause>(C))
12682 return llvm::all_of(TC->all_decls(), [](
ValueDecl *VD) {
12683 return !VD || !VD->hasAttr<OMPDeclareTargetDeclAttr>() ||
12684 (VD->isExternallyVisible() &&
12685 VD->getVisibility() != HiddenVisibility);
12687 else if (
auto *FC = dyn_cast<OMPFromClause>(C))
12688 return llvm::all_of(FC->all_decls(), [](
ValueDecl *VD) {
12689 return !VD || !VD->hasAttr<OMPDeclareTargetDeclAttr>() ||
12690 (VD->isExternallyVisible() &&
12691 VD->getVisibility() != HiddenVisibility);
12705 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
12710 if (!
hasClauses(Clauses, OMPC_map, OMPC_use_device_ptr) &&
12714 Expected =
"'map' or 'use_device_ptr'";
12716 Expected =
"'map', 'use_device_ptr', or 'use_device_addr'";
12717 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
12718 <<
Expected << getOpenMPDirectiveName(OMPD_target_data);
12735 auto *CS = cast<CapturedStmt>(AStmt);
12743 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12756 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
12757 <<
"'map'" << getOpenMPDirectiveName(OMPD_target_enter_data);
12772 auto *CS = cast<CapturedStmt>(AStmt);
12780 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12793 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
12794 <<
"'map'" << getOpenMPDirectiveName(OMPD_target_exit_data);
12809 auto *CS = cast<CapturedStmt>(AStmt);
12817 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12827 if (!
hasClauses(Clauses, OMPC_to, OMPC_from)) {
12828 Diag(StartLoc, diag::err_omp_at_least_one_motion_clause_required);
12833 Diag(StartLoc, diag::err_omp_cannot_update_with_internal_linkage);
12847 auto *CS = cast<CapturedStmt>(AStmt);
12857 DSAStack->setParentTeamsRegionLoc(StartLoc);
12866 if (
DSAStack->isParentNowaitRegion()) {
12867 Diag(StartLoc, diag::err_omp_parent_cancel_region_nowait) << 0;
12870 if (
DSAStack->isParentOrderedRegion()) {
12871 Diag(StartLoc, diag::err_omp_parent_cancel_region_ordered) << 0;
12882 if (
DSAStack->isParentNowaitRegion()) {
12883 Diag(StartLoc, diag::err_omp_parent_cancel_region_nowait) << 1;
12886 if (
DSAStack->isParentOrderedRegion()) {
12887 Diag(StartLoc, diag::err_omp_parent_cancel_region_ordered) << 1;
12890 DSAStack->setParentCancelRegion(
true);
12897 const OMPClause *ReductionClause =
nullptr;
12898 const OMPClause *NogroupClause =
nullptr;
12900 if (C->getClauseKind() == OMPC_reduction) {
12901 ReductionClause = C;
12906 if (C->getClauseKind() == OMPC_nogroup) {
12908 if (ReductionClause)
12913 if (ReductionClause && NogroupClause) {
12914 S.
Diag(ReductionClause->
getBeginLoc(), diag::err_omp_reduction_with_nogroup)
12928 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
12932 unsigned NestedLoopCount =
12935 VarsWithImplicitDSA, B);
12936 if (NestedLoopCount == 0)
12940 "omp for loop exprs were not built");
12946 {OMPC_grainsize, OMPC_num_tasks}))
12956 NestedLoopCount, Clauses, AStmt, B,
12966 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
12970 unsigned NestedLoopCount =
12973 VarsWithImplicitDSA, B);
12974 if (NestedLoopCount == 0)
12978 "omp for loop exprs were not built");
12983 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
12995 {OMPC_grainsize, OMPC_num_tasks}))
13007 NestedLoopCount, Clauses, AStmt, B);
13016 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13020 unsigned NestedLoopCount =
13023 VarsWithImplicitDSA, B);
13024 if (NestedLoopCount == 0)
13028 "omp for loop exprs were not built");
13034 {OMPC_grainsize, OMPC_num_tasks}))
13044 NestedLoopCount, Clauses, AStmt, B,
13054 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13058 unsigned NestedLoopCount =
13061 VarsWithImplicitDSA, B);
13062 if (NestedLoopCount == 0)
13066 "omp for loop exprs were not built");
13071 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13083 {OMPC_grainsize, OMPC_num_tasks}))
13095 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13104 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13105 auto *CS = cast<CapturedStmt>(AStmt);
13112 for (
int ThisCaptureLevel =
13114 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13130 VarsWithImplicitDSA, B);
13131 if (NestedLoopCount == 0)
13135 "omp for loop exprs were not built");
13141 {OMPC_grainsize, OMPC_num_tasks}))
13151 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13161 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13162 auto *CS = cast<CapturedStmt>(AStmt);
13169 for (
int ThisCaptureLevel =
13171 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13187 VarsWithImplicitDSA, B);
13188 if (NestedLoopCount == 0)
13192 "omp for loop exprs were not built");
13197 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13209 {OMPC_grainsize, OMPC_num_tasks}))
13221 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13230 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13234 unsigned NestedLoopCount =
13237 *
this, *
DSAStack, VarsWithImplicitDSA, B);
13238 if (NestedLoopCount == 0)
13242 "omp for loop exprs were not built");
13246 NestedLoopCount, Clauses, AStmt, B);
13255 auto *CS = cast<CapturedStmt>(AStmt);
13262 for (
int ThisCaptureLevel =
13264 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13280 VarsWithImplicitDSA, B);
13281 if (NestedLoopCount == 0)
13285 "omp for loop exprs were not built");
13289 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13299 auto *CS = cast<CapturedStmt>(AStmt);
13306 for (
int ThisCaptureLevel =
13308 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13324 VarsWithImplicitDSA, B);
13325 if (NestedLoopCount == 0)
13329 "omp for loop exprs were not built");
13334 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13347 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13356 auto *CS = cast<CapturedStmt>(AStmt);
13364 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13377 unsigned NestedLoopCount =
13379 nullptr , CS, *
this,
13380 *
DSAStack, VarsWithImplicitDSA, B);
13381 if (NestedLoopCount == 0)
13385 "omp for loop exprs were not built");
13390 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13403 NestedLoopCount, Clauses, AStmt, B);
13412 auto *CS = cast<CapturedStmt>(AStmt);
13420 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13437 if (NestedLoopCount == 0)
13441 "omp target parallel for simd loop exprs were not built");
13446 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13458 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13467 auto *CS = cast<CapturedStmt>(AStmt);
13475 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13488 unsigned NestedLoopCount =
13491 VarsWithImplicitDSA, B);
13492 if (NestedLoopCount == 0)
13496 "omp target simd loop exprs were not built");
13501 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13514 NestedLoopCount, Clauses, AStmt, B);
13523 auto *CS = cast<CapturedStmt>(AStmt);
13531 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13544 unsigned NestedLoopCount =
13546 nullptr , CS, *
this,
13547 *
DSAStack, VarsWithImplicitDSA, B);
13548 if (NestedLoopCount == 0)
13552 "omp teams distribute loop exprs were not built");
13556 DSAStack->setParentTeamsRegionLoc(StartLoc);
13559 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13568 auto *CS = cast<CapturedStmt>(AStmt);
13575 for (
int ThisCaptureLevel =
13577 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13593 VarsWithImplicitDSA, B);
13595 if (NestedLoopCount == 0)
13599 "omp teams distribute simd loop exprs were not built");
13604 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13617 DSAStack->setParentTeamsRegionLoc(StartLoc);
13620 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13629 auto *CS = cast<CapturedStmt>(AStmt);
13637 for (
int ThisCaptureLevel =
13639 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13655 VarsWithImplicitDSA, B);
13657 if (NestedLoopCount == 0)
13661 "omp for loop exprs were not built");
13666 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13679 DSAStack->setParentTeamsRegionLoc(StartLoc);
13682 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13691 auto *CS = cast<CapturedStmt>(AStmt);
13699 for (
int ThisCaptureLevel =
13701 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13717 VarsWithImplicitDSA, B);
13719 if (NestedLoopCount == 0)
13723 "omp for loop exprs were not built");
13727 DSAStack->setParentTeamsRegionLoc(StartLoc);
13730 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13741 auto *CS = cast<CapturedStmt>(AStmt);
13750 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13771 auto *CS = cast<CapturedStmt>(AStmt);
13778 for (
int ThisCaptureLevel =
13780 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13796 VarsWithImplicitDSA, B);
13797 if (NestedLoopCount == 0)
13801 "omp target teams distribute loop exprs were not built");
13805 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13814 auto *CS = cast<CapturedStmt>(AStmt);
13821 for (
int ThisCaptureLevel =
13823 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13839 VarsWithImplicitDSA, B);
13840 if (NestedLoopCount == 0)
13844 "omp target teams distribute parallel for loop exprs were not built");
13849 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13859 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13869 auto *CS = cast<CapturedStmt>(AStmt);
13877 OMPD_target_teams_distribute_parallel_for_simd);
13878 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13891 unsigned NestedLoopCount =
13894 nullptr , CS, *
this,
13895 *
DSAStack, VarsWithImplicitDSA, B);
13896 if (NestedLoopCount == 0)
13900 "omp target teams distribute parallel for simd loop exprs were not "
13906 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13919 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13928 auto *CS = cast<CapturedStmt>(AStmt);
13935 for (
int ThisCaptureLevel =
13937 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13953 VarsWithImplicitDSA, B);
13954 if (NestedLoopCount == 0)
13958 "omp target teams distribute simd loop exprs were not built");
13963 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13976 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
// Verifies that the statement associated with a loop-transformation
// directive (tile/unroll) is a nest of canonical loops. For each loop
// level it runs checkOpenMPLoop to build the per-loop helper expressions
// (LoopHelpers), records the loop's init statement into OriginalInits,
// and descends into the loop body; the innermost body is returned via
// Body. Nested loop-transformation directives (tile/unroll) encountered
// inside the nest contribute their pre-init declarations to
// OriginalInits as well.
// NOTE(review): the parameter list and the tail of this function are not
// visible in this listing; comments describe only the visible flow.
13979 bool Sema::checkTransformableLoopNest(
// One OriginalInits slot per loop level; the first is created up front.
13985 OriginalInits.emplace_back();
13988 [
this, &LoopHelpers, &Body, &OriginalInits,
Kind](
unsigned Cnt,
// Per-level callback: analyze the loop at depth Cnt as a single
// canonical loop and capture its helper expressions.
13990 VarsWithInheritedDSAType TmpDSA;
13991 unsigned SingleNumLoops =
13992 checkOpenMPLoop(Kind, nullptr, nullptr, CurStmt, *this, *DSAStack,
13993 TmpDSA, LoopHelpers[Cnt]);
13994 if (SingleNumLoops == 0)
13996 assert(SingleNumLoops == 1 &&
"Expect single loop iteration space");
// Canonical loops are either a ForStmt or a CXXForRangeStmt; record the
// init statement and step into the body for the next level.
13997 if (auto *For = dyn_cast<ForStmt>(CurStmt)) {
13998 OriginalInits.back().push_back(For->getInit());
13999 Body = For->getBody();
14001 assert(isa<CXXForRangeStmt>(CurStmt) &&
14002 "Expected canonical for or range-based for loops.");
14003 auto *CXXFor = cast<CXXForRangeStmt>(CurStmt);
14004 OriginalInits.back().push_back(CXXFor->getBeginStmt());
14005 Body = CXXFor->getBody();
// Start a fresh pre-init slot for the next (inner) level.
14007 OriginalInits.emplace_back();
// A nested loop transformation supplies pre-init declarations that the
// generated loops depend on; collect them for this level.
14011 Stmt *DependentPreInits;
14012 if (
auto *Dir = dyn_cast<OMPTileDirective>(Transform))
14013 DependentPreInits = Dir->getPreInits();
14014 else if (
auto *Dir = dyn_cast<OMPUnrollDirective>(Transform))
14015 DependentPreInits = Dir->getPreInits();
14017 llvm_unreachable(
"Unhandled loop transformation");
14018 if (!DependentPreInits)
14020 llvm::append_range(OriginalInits.back(),
14021 cast<DeclStmt>(DependentPreInits)->getDeclGroup());
// The slot opened for the level below the innermost loop must be empty;
// drop it so OriginalInits has exactly one entry per loop.
14023 assert(OriginalInits.back().empty() &&
"No preinit after innermost loop");
14024 OriginalInits.pop_back();
14031 auto SizesClauses =
14032 OMPExecutableDirective::getClausesOfKind<OMPSizesClause>(Clauses);
14033 if (SizesClauses.empty()) {
14046 Stmt *Body =
nullptr;
14049 if (!checkTransformableLoopNest(OMPD_tile, AStmt, NumLoops, LoopHelpers, Body,
14056 NumLoops, AStmt,
nullptr,
nullptr);
14063 FloorIndVars.resize(NumLoops);
14064 TileIndVars.resize(NumLoops);
14065 for (
unsigned I = 0; I < NumLoops; ++I) {
14068 assert(LoopHelper.
Counters.size() == 1 &&
14069 "Expect single-dimensional loop iteration space");
14070 auto *OrigCntVar = cast<DeclRefExpr>(LoopHelper.
Counters.front());
14071 std::string OrigVarName = OrigCntVar->getNameInfo().getAsString();
14078 (Twine(
".floor_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
14080 buildVarDecl(*
this, {}, CntTy, FloorCntName,
nullptr, OrigCntVar);
14081 FloorIndVars[I] = FloorCntDecl;
14087 (Twine(
".tile_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
14092 auto *TileCntDecl = cast<VarDecl>(IterVarRef->
getDecl());
14094 TileIndVars[I] = TileCntDecl;
14096 for (
auto &
P : OriginalInits[I]) {
14097 if (
auto *D =
P.dyn_cast<
Decl *>())
14098 PreInits.push_back(D);
14099 else if (
auto *PI = dyn_cast_or_null<DeclStmt>(
P.dyn_cast<
Stmt *>()))
14100 PreInits.append(PI->decl_begin(), PI->decl_end());
14102 if (
auto *PI = cast_or_null<DeclStmt>(LoopHelper.
PreInits))
14103 PreInits.append(PI->decl_begin(), PI->decl_end());
14106 auto *CounterDecl = cast<DeclRefExpr>(CounterRef)->getDecl();
14107 if (isa<OMPCapturedExprDecl>(CounterDecl))
14108 PreInits.push_back(CounterDecl);
14113 Stmt *Inner = Body;
14116 for (
int I = NumLoops - 1; I >= 0; --I) {
14119 auto *OrigCntVar = cast<DeclRefExpr>(LoopHelper.
Counters[0]);
14120 QualType CntTy = OrigCntVar->getType();
14126 OrigCntVar->getExprLoc());
14128 OrigCntVar->getExprLoc());
14133 Decl *CounterDecl = TileIndVars[I];
14136 OrigCntVar->getBeginLoc(), OrigCntVar->getEndLoc());
14143 BO_Add, FloorIV, DimTileSize);
14148 NumIterations, EndOfTile.
get());
14153 IsPartialTile.
get(), NumIterations, EndOfTile.
get());
14154 if (!MinTileAndIterSpace.
isUsable())
14157 BO_LT, TileIV, MinTileAndIterSpace.
get());
14181 BodyParts.append(LoopHelper.
Updates.begin(), LoopHelper.
Updates.end());
14182 BodyParts.push_back(Inner);
14184 Inner->getEndLoc());
14192 for (
int I = NumLoops - 1; I >= 0; --I) {
14193 auto &LoopHelper = LoopHelpers[I];
14194 Expr *NumIterations = LoopHelper.NumIterations;
14195 DeclRefExpr *OrigCntVar = cast<DeclRefExpr>(LoopHelper.Counters[0]);
14209 Decl *CounterDecl = FloorIndVars[I];
14218 BO_LT, FloorIV, NumIterations);
14224 BO_AddAssign, FloorIV, DimTileSize);
14230 IncrStmt.
get(), Inner, LoopHelper.Init->getBeginLoc(),
14231 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
14251 OMPExecutableDirective::getSingleClause<OMPFullClause>(Clauses);
14253 OMPExecutableDirective::getSingleClause<OMPPartialClause>(Clauses);
14254 assert(!(FullClause && PartialClause) &&
14255 "mutual exclusivity must have been checked before");
14257 constexpr
unsigned NumLoops = 1;
14258 Stmt *Body =
nullptr;
14263 if (!checkTransformableLoopNest(OMPD_unroll, AStmt, NumLoops, LoopHelpers,
14264 Body, OriginalInits))
14267 unsigned NumGeneratedLoops = PartialClause ? 1 : 0;
14272 NumGeneratedLoops,
nullptr,
nullptr);
14277 if (!VerifyPositiveIntegerConstantInClause(
14281 Diag(AStmt->
getBeginLoc(), diag::err_omp_unroll_full_variable_trip_count);
14283 <<
"#pragma omp unroll full";
14291 if (NumGeneratedLoops == 0)
14293 NumGeneratedLoops,
nullptr,
nullptr);
14339 assert(OriginalInits.size() == 1 &&
14340 "Expecting a single-dimensional loop iteration space");
14341 for (
auto &
P : OriginalInits[0]) {
14342 if (
auto *D =
P.dyn_cast<
Decl *>())
14343 PreInits.push_back(D);
14344 else if (
auto *PI = dyn_cast_or_null<DeclStmt>(
P.dyn_cast<
Stmt *>()))
14345 PreInits.append(PI->decl_begin(), PI->decl_end());
14347 if (
auto *PI = cast_or_null<DeclStmt>(LoopHelper.
PreInits))
14348 PreInits.append(PI->decl_begin(), PI->decl_end());
14351 auto *CounterDecl = cast<DeclRefExpr>(CounterRef)->getDecl();
14352 if (isa<OMPCapturedExprDecl>(CounterDecl))
14353 PreInits.push_back(CounterDecl);
14356 auto *IterationVarRef = cast<DeclRefExpr>(LoopHelper.
IterationVarRef);
14357 QualType IVTy = IterationVarRef->getType();
14358 assert(LoopHelper.
Counters.size() == 1 &&
14359 "Expecting a single-dimensional loop iteration space");
14360 auto *OrigVar = cast<DeclRefExpr>(LoopHelper.
Counters.front());
14367 FactorVal->getIntegerConstantExpr(
Context).getValue().getZExtValue();
14368 FactorLoc = FactorVal->getExprLoc();
14373 assert(Factor > 0 &&
"Expected positive unroll factor");
14374 auto MakeFactorExpr = [
this, Factor, IVTy, FactorLoc]() {
14386 std::string OrigVarName = OrigVar->getNameInfo().getAsString();
14387 std::string OuterIVName = (Twine(
".unrolled.iv.") + OrigVarName).str();
14388 std::string InnerIVName = (Twine(
".unroll_inner.iv.") + OrigVarName).str();
14390 (Twine(
".unroll_inner.tripcount.") + OrigVarName).str();
14394 buildVarDecl(*
this, {}, IVTy, OuterIVName,
nullptr, OrigVar);
14395 auto MakeOuterRef = [
this, OuterIVDecl, IVTy, OrigVarLoc]() {
14401 auto *InnerIVDecl = cast<VarDecl>(IterationVarRef->getDecl());
14403 auto MakeInnerRef = [
this, InnerIVDecl, IVTy, OrigVarLoc]() {
14409 CaptureVars CopyTransformer(*
this);
14410 auto MakeNumIterations = [&CopyTransformer, &LoopHelper]() ->
Expr * {
14431 BO_Add, MakeOuterRef(), MakeFactorExpr());
14435 BO_LE, MakeInnerRef(), EndOfTile.
get());
14440 MakeNumIterations());
14445 InnerCond1.
get(), InnerCond2.
get());
14451 UO_PreInc, MakeInnerRef());
14457 InnerBodyStmts.append(LoopHelper.
Updates.begin(), LoopHelper.
Updates.end());
14458 InnerBodyStmts.push_back(Body);
14478 LoopHintAttr *UnrollHintAttr =
14479 LoopHintAttr::CreateImplicit(
Context, LoopHintAttr::UnrollCount,
14480 LoopHintAttr::Numeric, MakeFactorExpr());
14496 MakeNumIterations());
14503 MakeOuterRef(), MakeFactorExpr());
14514 NumGeneratedLoops, OuterFor,
14527 case OMPC_num_threads:
14536 case OMPC_allocator:
14539 case OMPC_collapse:
14545 case OMPC_num_teams:
14548 case OMPC_thread_limit:
14551 case OMPC_priority:
14554 case OMPC_grainsize:
14557 case OMPC_num_tasks:
14569 case OMPC_novariants:
14572 case OMPC_nocontext:
14587 case OMPC_proc_bind:
14588 case OMPC_schedule:
14590 case OMPC_firstprivate:
14591 case OMPC_lastprivate:
14593 case OMPC_reduction:
14594 case OMPC_task_reduction:
14595 case OMPC_in_reduction:
14599 case OMPC_copyprivate:
14602 case OMPC_mergeable:
14603 case OMPC_threadprivate:
14605 case OMPC_allocate:
14622 case OMPC_dist_schedule:
14623 case OMPC_defaultmap:
14628 case OMPC_use_device_ptr:
14629 case OMPC_use_device_addr:
14630 case OMPC_is_device_ptr:
14631 case OMPC_unified_address:
14632 case OMPC_unified_shared_memory:
14633 case OMPC_reverse_offload:
14634 case OMPC_dynamic_allocators:
14635 case OMPC_atomic_default_mem_order:
14636 case OMPC_device_type:
14638 case OMPC_nontemporal:
14641 case OMPC_inclusive:
14642 case OMPC_exclusive:
14643 case OMPC_uses_allocators:
14644 case OMPC_affinity:
14648 llvm_unreachable(
"Clause is not allowed.");
14665 case OMPD_target_parallel_for_simd:
14666 if (OpenMPVersion >= 50 &&
14667 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)) {
14668 CaptureRegion = OMPD_parallel;
14672 case OMPD_target_parallel:
14673 case OMPD_target_parallel_for:
14674 case OMPD_target_parallel_loop:
14677 if (NameModifier == OMPD_unknown || NameModifier == OMPD_parallel)
14678 CaptureRegion = OMPD_target;
14680 case OMPD_target_teams_distribute_parallel_for_simd:
14681 if (OpenMPVersion >= 50 &&
14682 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)) {
14683 CaptureRegion = OMPD_parallel;
14687 case OMPD_target_teams_distribute_parallel_for:
14690 if (NameModifier == OMPD_unknown || NameModifier == OMPD_parallel)
14691 CaptureRegion = OMPD_teams;
14693 case OMPD_teams_distribute_parallel_for_simd:
14694 if (OpenMPVersion >= 50 &&
14695 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)) {
14696 CaptureRegion = OMPD_parallel;
14700 case OMPD_teams_distribute_parallel_for:
14701 CaptureRegion = OMPD_teams;
14703 case OMPD_target_update:
14704 case OMPD_target_enter_data:
14705 case OMPD_target_exit_data:
14706 CaptureRegion = OMPD_task;
14708 case OMPD_parallel_master_taskloop:
14709 if (NameModifier == OMPD_unknown || NameModifier == OMPD_taskloop)
14710 CaptureRegion = OMPD_parallel;
14712 case OMPD_parallel_master_taskloop_simd:
14713 if ((OpenMPVersion <= 45 && NameModifier == OMPD_unknown) ||
14714 NameModifier == OMPD_taskloop) {
14715 CaptureRegion = OMPD_parallel;
14718 if (OpenMPVersion <= 45)
14720 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
14721 CaptureRegion = OMPD_taskloop;
14723 case OMPD_parallel_for_simd:
14724 if (OpenMPVersion <= 45)
14726 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
14727 CaptureRegion = OMPD_parallel;
14729 case OMPD_taskloop_simd:
14730 case OMPD_master_taskloop_simd:
14731 if (OpenMPVersion <= 45)
14733 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
14734 CaptureRegion = OMPD_taskloop;
14736 case OMPD_distribute_parallel_for_simd:
14737 if (OpenMPVersion <= 45)
14739 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
14740 CaptureRegion = OMPD_parallel;
14742 case OMPD_target_simd:
14743 if (OpenMPVersion >= 50 &&
14744 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd))
14745 CaptureRegion = OMPD_target;
14747 case OMPD_teams_distribute_simd:
14748 case OMPD_target_teams_distribute_simd:
14749 if (OpenMPVersion >= 50 &&
14750 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd))
14751 CaptureRegion = OMPD_teams;
14754 case OMPD_parallel:
14755 case OMPD_parallel_master:
14756 case OMPD_parallel_sections:
14757 case OMPD_parallel_for:
14758 case OMPD_parallel_loop:
14760 case OMPD_target_teams:
14761 case OMPD_target_teams_distribute:
14762 case OMPD_target_teams_loop:
14763 case OMPD_distribute_parallel_for:
14765 case OMPD_taskloop:
14766 case OMPD_master_taskloop:
14767 case OMPD_target_data:
14769 case OMPD_for_simd:
14770 case OMPD_distribute_simd:
14773 case OMPD_threadprivate:
14774 case OMPD_allocate:
14775 case OMPD_taskyield:
14777 case OMPD_taskwait:
14778 case OMPD_cancellation_point:
14782 case OMPD_declare_reduction:
14783 case OMPD_declare_mapper:
14784 case OMPD_declare_simd:
14785 case OMPD_declare_variant:
14786 case OMPD_begin_declare_variant:
14787 case OMPD_end_declare_variant:
14788 case OMPD_declare_target:
14789 case OMPD_end_declare_target:
14791 case OMPD_teams_loop:
14796 case OMPD_sections:
14801 case OMPD_critical:
14802 case OMPD_taskgroup:
14803 case OMPD_distribute:
14806 case OMPD_teams_distribute:
14807 case OMPD_requires:
14808 case OMPD_metadirective:
14809 llvm_unreachable(
"Unexpected OpenMP directive with if-clause");
14812 llvm_unreachable(
"Unknown OpenMP directive");
14815 case OMPC_num_threads:
14817 case OMPD_target_parallel:
14818 case OMPD_target_parallel_for:
14819 case OMPD_target_parallel_for_simd:
14820 case OMPD_target_parallel_loop:
14821 CaptureRegion = OMPD_target;
14823 case OMPD_teams_distribute_parallel_for:
14824 case OMPD_teams_distribute_parallel_for_simd:
14825 case OMPD_target_teams_distribute_parallel_for:
14826 case OMPD_target_teams_distribute_parallel_for_simd:
14827 CaptureRegion = OMPD_teams;
14829 case OMPD_parallel:
14830 case OMPD_parallel_master:
14831 case OMPD_parallel_sections:
14832 case OMPD_parallel_for:
14833 case OMPD_parallel_for_simd:
14834 case OMPD_parallel_loop:
14835 case OMPD_distribute_parallel_for:
14836 case OMPD_distribute_parallel_for_simd:
14837 case OMPD_parallel_master_taskloop:
14838 case OMPD_parallel_master_taskloop_simd:
14841 case OMPD_target_data:
14842 case OMPD_target_enter_data:
14843 case OMPD_target_exit_data:
14844 case OMPD_target_update:
14846 case OMPD_target_simd:
14847 case OMPD_target_teams:
14848 case OMPD_target_teams_distribute:
14849 case OMPD_target_teams_distribute_simd:
14852 case OMPD_taskloop:
14853 case OMPD_taskloop_simd:
14854 case OMPD_master_taskloop:
14855 case OMPD_master_taskloop_simd:
14856 case OMPD_threadprivate:
14857 case OMPD_allocate:
14858 case OMPD_taskyield:
14860 case OMPD_taskwait:
14861 case OMPD_cancellation_point:
14865 case OMPD_declare_reduction:
14866 case OMPD_declare_mapper:
14867 case OMPD_declare_simd:
14868 case OMPD_declare_variant:
14869 case OMPD_begin_declare_variant:
14870 case OMPD_end_declare_variant:
14871 case OMPD_declare_target:
14872 case OMPD_end_declare_target:
14874 case OMPD_teams_loop:
14875 case OMPD_target_teams_loop:
14881 case OMPD_for_simd:
14882 case OMPD_sections:
14887 case OMPD_critical:
14888 case OMPD_taskgroup:
14889 case OMPD_distribute:
14892 case OMPD_distribute_simd:
14893 case OMPD_teams_distribute:
14894 case OMPD_teams_distribute_simd:
14895 case OMPD_requires:
14896 case OMPD_metadirective:
14897 llvm_unreachable(
"Unexpected OpenMP directive with num_threads-clause");
14900 llvm_unreachable(
"Unknown OpenMP directive");
14903 case OMPC_num_teams:
14905 case OMPD_target_teams:
14906 case OMPD_target_teams_distribute:
14907 case OMPD_target_teams_distribute_simd:
14908 case OMPD_target_teams_distribute_parallel_for:
14909 case OMPD_target_teams_distribute_parallel_for_simd:
14910 case OMPD_target_teams_loop:
14911 CaptureRegion = OMPD_target;
14913 case OMPD_teams_distribute_parallel_for:
14914 case OMPD_teams_distribute_parallel_for_simd:
14916 case OMPD_teams_distribute:
14917 case OMPD_teams_distribute_simd:
14918 case OMPD_teams_loop:
14921 case OMPD_distribute_parallel_for:
14922 case OMPD_distribute_parallel_for_simd:
14924 case OMPD_taskloop:
14925 case OMPD_taskloop_simd:
14926 case OMPD_master_taskloop:
14927 case OMPD_master_taskloop_simd:
14928 case OMPD_parallel_master_taskloop:
14929 case OMPD_parallel_master_taskloop_simd:
14930 case OMPD_target_data:
14931 case OMPD_target_enter_data:
14932 case OMPD_target_exit_data:
14933 case OMPD_target_update:
14935 case OMPD_parallel:
14936 case OMPD_parallel_master:
14937 case OMPD_parallel_sections:
14938 case OMPD_parallel_for:
14939 case OMPD_parallel_for_simd:
14940 case OMPD_parallel_loop:
14942 case OMPD_target_simd:
14943 case OMPD_target_parallel:
14944 case OMPD_target_parallel_for:
14945 case OMPD_target_parallel_for_simd:
14946 case OMPD_target_parallel_loop:
14947 case OMPD_threadprivate:
14948 case OMPD_allocate:
14949 case OMPD_taskyield:
14951 case OMPD_taskwait:
14952 case OMPD_cancellation_point:
14956 case OMPD_declare_reduction:
14957 case OMPD_declare_mapper:
14958 case OMPD_declare_simd:
14959 case OMPD_declare_variant:
14960 case OMPD_begin_declare_variant:
14961 case OMPD_end_declare_variant:
14962 case OMPD_declare_target:
14963 case OMPD_end_declare_target:
14969 case OMPD_for_simd:
14970 case OMPD_sections:
14975 case OMPD_critical:
14976 case OMPD_taskgroup:
14977 case OMPD_distribute:
14980 case OMPD_distribute_simd:
14981 case OMPD_requires:
14982 case OMPD_metadirective:
14983 llvm_unreachable(
"Unexpected OpenMP directive with num_teams-clause");
14986 llvm_unreachable(
"Unknown OpenMP directive");
14989 case OMPC_thread_limit:
14991 case OMPD_target_teams:
14992 case OMPD_target_teams_distribute:
14993 case OMPD_target_teams_distribute_simd:
14994 case OMPD_target_teams_distribute_parallel_for:
14995 case OMPD_target_teams_distribute_parallel_for_simd:
14996 case OMPD_target_teams_loop:
14997 CaptureRegion = OMPD_target;
14999 case OMPD_teams_distribute_parallel_for:
15000 case OMPD_teams_distribute_parallel_for_simd:
15002 case OMPD_teams_distribute:
15003 case OMPD_teams_distribute_simd:
15004 case OMPD_teams_loop:
15007 case OMPD_distribute_parallel_for:
15008 case OMPD_distribute_parallel_for_simd:
15010 case OMPD_taskloop:
15011 case OMPD_taskloop_simd:
15012 case OMPD_master_taskloop:
15013 case OMPD_master_taskloop_simd:
15014 case OMPD_parallel_master_taskloop:
15015 case OMPD_parallel_master_taskloop_simd:
15016 case OMPD_target_data:
15017 case OMPD_target_enter_data:
15018 case OMPD_target_exit_data:
15019 case OMPD_target_update:
15021 case OMPD_parallel:
15022 case OMPD_parallel_master:
15023 case OMPD_parallel_sections:
15024 case OMPD_parallel_for:
15025 case OMPD_parallel_for_simd:
15026 case OMPD_parallel_loop:
15028 case OMPD_target_simd:
15029 case OMPD_target_parallel:
15030 case OMPD_target_parallel_for:
15031 case OMPD_target_parallel_for_simd:
15032 case OMPD_target_parallel_loop:
15033 case OMPD_threadprivate:
15034 case OMPD_allocate:
15035 case OMPD_taskyield:
15037 case OMPD_taskwait:
15038 case OMPD_cancellation_point:
15042 case OMPD_declare_reduction:
15043 case OMPD_declare_mapper:
15044 case OMPD_declare_simd:
15045 case OMPD_declare_variant:
15046 case OMPD_begin_declare_variant:
15047 case OMPD_end_declare_variant:
15048 case OMPD_declare_target:
15049 case OMPD_end_declare_target:
15055 case OMPD_for_simd:
15056 case OMPD_sections:
15061 case OMPD_critical:
15062 case OMPD_taskgroup:
15063 case OMPD_distribute:
15066 case OMPD_distribute_simd:
15067 case OMPD_requires:
15068 case OMPD_metadirective:
15069 llvm_unreachable(
"Unexpected OpenMP directive with thread_limit-clause");
15072 llvm_unreachable(
"Unknown OpenMP directive");
15075 case OMPC_schedule:
15077 case OMPD_parallel_for:
15078 case OMPD_parallel_for_simd:
15079 case OMPD_distribute_parallel_for:
15080 case OMPD_distribute_parallel_for_simd:
15081 case OMPD_teams_distribute_parallel_for:
15082 case OMPD_teams_distribute_parallel_for_simd:
15083 case OMPD_target_parallel_for:
15084 case OMPD_target_parallel_for_simd:
15085 case OMPD_target_teams_distribute_parallel_for:
15086 case OMPD_target_teams_distribute_parallel_for_simd:
15087 CaptureRegion = OMPD_parallel;
15090 case OMPD_for_simd:
15094 case OMPD_taskloop:
15095 case OMPD_taskloop_simd:
15096 case OMPD_master_taskloop:
15097 case OMPD_master_taskloop_simd:
15098 case OMPD_parallel_master_taskloop:
15099 case OMPD_parallel_master_taskloop_simd:
15100 case OMPD_target_data:
15101 case OMPD_target_enter_data:
15102 case OMPD_target_exit_data:
15103 case OMPD_target_update:
15105 case OMPD_teams_distribute:
15106 case OMPD_teams_distribute_simd:
15107 case OMPD_target_teams_distribute:
15108 case OMPD_target_teams_distribute_simd:
15110 case OMPD_target_simd:
15111 case OMPD_target_parallel:
15113 case OMPD_parallel:
15114 case OMPD_parallel_master:
15115 case OMPD_parallel_sections:
15116 case OMPD_threadprivate:
15117 case OMPD_allocate:
15118 case OMPD_taskyield:
15120 case OMPD_taskwait:
15121 case OMPD_cancellation_point:
15125 case OMPD_declare_reduction:
15126 case OMPD_declare_mapper:
15127 case OMPD_declare_simd:
15128 case OMPD_declare_variant:
15129 case OMPD_begin_declare_variant:
15130 case OMPD_end_declare_variant:
15131 case OMPD_declare_target:
15132 case OMPD_end_declare_target:
15134 case OMPD_teams_loop:
15135 case OMPD_target_teams_loop:
15136 case OMPD_parallel_loop:
15137 case OMPD_target_parallel_loop:
15141 case OMPD_sections:
15146 case OMPD_critical:
15147 case OMPD_taskgroup:
15148 case OMPD_distribute:
15151 case OMPD_distribute_simd:
15152 case OMPD_target_teams:
15153 case OMPD_requires:
15154 case OMPD_metadirective:
15155 llvm_unreachable(
"Unexpected OpenMP directive with schedule clause");
15158 llvm_unreachable(
"Unknown OpenMP directive");
15161 case OMPC_dist_schedule:
15163 case OMPD_teams_distribute_parallel_for:
15164 case OMPD_teams_distribute_parallel_for_simd:
15165 case OMPD_teams_distribute:
15166 case OMPD_teams_distribute_simd:
15167 case OMPD_target_teams_distribute_parallel_for:
15168 case OMPD_target_teams_distribute_parallel_for_simd:
15169 case OMPD_target_teams_distribute:
15170 case OMPD_target_teams_distribute_simd:
15171 CaptureRegion = OMPD_teams;
15173 case OMPD_distribute_parallel_for:
15174 case OMPD_distribute_parallel_for_simd:
15175 case OMPD_distribute:
15176 case OMPD_distribute_simd:
15179 case OMPD_parallel_for:
15180 case OMPD_parallel_for_simd:
15181 case OMPD_target_parallel_for_simd:
15182 case OMPD_target_parallel_for:
15184 case OMPD_taskloop:
15185 case OMPD_taskloop_simd:
15186 case OMPD_master_taskloop:
15187 case OMPD_master_taskloop_simd:
15188 case OMPD_parallel_master_taskloop:
15189 case OMPD_parallel_master_taskloop_simd:
15190 case OMPD_target_data:
15191 case OMPD_target_enter_data:
15192 case OMPD_target_exit_data:
15193 case OMPD_target_update:
15196 case OMPD_target_simd:
15197 case OMPD_target_parallel:
15199 case OMPD_parallel:
15200 case OMPD_parallel_master:
15201 case OMPD_parallel_sections:
15202 case OMPD_threadprivate:
15203 case OMPD_allocate:
15204 case OMPD_taskyield:
15206 case OMPD_taskwait:
15207 case OMPD_cancellation_point:
15211 case OMPD_declare_reduction:
15212 case OMPD_declare_mapper:
15213 case OMPD_declare_simd:
15214 case OMPD_declare_variant:
15215 case OMPD_begin_declare_variant:
15216 case OMPD_end_declare_variant:
15217 case OMPD_declare_target:
15218 case OMPD_end_declare_target:
15220 case OMPD_teams_loop:
15221 case OMPD_target_teams_loop:
15222 case OMPD_parallel_loop:
15223 case OMPD_target_parallel_loop:
15228 case OMPD_for_simd:
15229 case OMPD_sections:
15234 case OMPD_critical:
15235 case OMPD_taskgroup:
15238 case OMPD_target_teams:
15239 case OMPD_requires:
15240 case OMPD_metadirective:
15241 llvm_unreachable(
"Unexpected OpenMP directive with dist_schedule clause");
15244 llvm_unreachable(
"Unknown OpenMP directive");
15249 case OMPD_target_update:
15250 case OMPD_target_enter_data:
15251 case OMPD_target_exit_data:
15253 case OMPD_target_simd:
15254 case OMPD_target_teams:
15255 case OMPD_target_parallel:
15256 case OMPD_target_teams_distribute:
15257 case OMPD_target_teams_distribute_simd:
15258 case OMPD_target_parallel_for:
15259 case OMPD_target_parallel_for_simd:
15260 case OMPD_target_parallel_loop:
15261 case OMPD_target_teams_distribute_parallel_for:
15262 case OMPD_target_teams_distribute_parallel_for_simd:
15263 case OMPD_target_teams_loop:
15264 case OMPD_dispatch:
15265 CaptureRegion = OMPD_task;
15267 case OMPD_target_data:
15271 case OMPD_teams_distribute_parallel_for:
15272 case OMPD_teams_distribute_parallel_for_simd:
15274 case OMPD_teams_distribute:
15275 case OMPD_teams_distribute_simd:
15276 case OMPD_distribute_parallel_for:
15277 case OMPD_distribute_parallel_for_simd:
15279 case OMPD_taskloop:
15280 case OMPD_taskloop_simd:
15281 case OMPD_master_taskloop:
15282 case OMPD_master_taskloop_simd:
15283 case OMPD_parallel_master_taskloop:
15284 case OMPD_parallel_master_taskloop_simd:
15286 case OMPD_parallel:
15287 case OMPD_parallel_master:
15288 case OMPD_parallel_sections:
15289 case OMPD_parallel_for:
15290 case OMPD_parallel_for_simd:
15291 case OMPD_threadprivate:
15292 case OMPD_allocate:
15293 case OMPD_taskyield:
15295 case OMPD_taskwait:
15296 case OMPD_cancellation_point:
15300 case OMPD_declare_reduction:
15301 case OMPD_declare_mapper:
15302 case OMPD_declare_simd:
15303 case OMPD_declare_variant:
15304 case OMPD_begin_declare_variant:
15305 case OMPD_end_declare_variant:
15306 case OMPD_declare_target:
15307 case OMPD_end_declare_target:
15309 case OMPD_teams_loop:
15310 case OMPD_parallel_loop:
15315 case OMPD_for_simd:
15316 case OMPD_sections:
15321 case OMPD_critical:
15322 case OMPD_taskgroup:
15323 case OMPD_distribute:
15326 case OMPD_distribute_simd:
15327 case OMPD_requires:
15328 case OMPD_metadirective:
15329 llvm_unreachable(
"Unexpected OpenMP directive with device-clause");
15332 llvm_unreachable(
"Unknown OpenMP directive");
15335 case OMPC_grainsize:
15336 case OMPC_num_tasks:
15338 case OMPC_priority:
15341 case OMPD_taskloop:
15342 case OMPD_taskloop_simd:
15343 case OMPD_master_taskloop:
15344 case OMPD_master_taskloop_simd:
15346 case OMPD_parallel_master_taskloop:
15347 case OMPD_parallel_master_taskloop_simd:
15348 CaptureRegion = OMPD_parallel;
15350 case OMPD_target_update:
15351 case OMPD_target_enter_data:
15352 case OMPD_target_exit_data:
15354 case OMPD_target_simd:
15355 case OMPD_target_teams:
15356 case OMPD_target_parallel:
15357 case OMPD_target_teams_distribute:
15358 case OMPD_target_teams_distribute_simd:
15359 case OMPD_target_parallel_for:
15360 case OMPD_target_parallel_for_simd:
15361 case OMPD_target_teams_distribute_parallel_for:
15362 case OMPD_target_teams_distribute_parallel_for_simd:
15363 case OMPD_target_data:
15364 case OMPD_teams_distribute_parallel_for:
15365 case OMPD_teams_distribute_parallel_for_simd:
15367 case OMPD_teams_distribute:
15368 case OMPD_teams_distribute_simd:
15369 case OMPD_distribute_parallel_for:
15370 case OMPD_distribute_parallel_for_simd:
15372 case OMPD_parallel:
15373 case OMPD_parallel_master:
15374 case OMPD_parallel_sections:
15375 case OMPD_parallel_for:
15376 case OMPD_parallel_for_simd:
15377 case OMPD_threadprivate:
15378 case OMPD_allocate:
15379 case OMPD_taskyield:
15381 case OMPD_taskwait:
15382 case OMPD_cancellation_point:
15386 case OMPD_declare_reduction:
15387 case OMPD_declare_mapper:
15388 case OMPD_declare_simd:
15389 case OMPD_declare_variant:
15390 case OMPD_begin_declare_variant:
15391 case OMPD_end_declare_variant:
15392 case OMPD_declare_target:
15393 case OMPD_end_declare_target:
15395 case OMPD_teams_loop:
15396 case OMPD_target_teams_loop:
15397 case OMPD_parallel_loop:
15398 case OMPD_target_parallel_loop:
15403 case OMPD_for_simd:
15404 case OMPD_sections:
15409 case OMPD_critical:
15410 case OMPD_taskgroup:
15411 case OMPD_distribute:
15414 case OMPD_distribute_simd:
15415 case OMPD_requires:
15416 case OMPD_metadirective:
15417 llvm_unreachable(
"Unexpected OpenMP directive with grainsize-clause");
15420 llvm_unreachable(
"Unknown OpenMP directive");
15423 case OMPC_novariants:
15424 case OMPC_nocontext:
15426 case OMPD_dispatch:
15427 CaptureRegion = OMPD_task;
15430 llvm_unreachable(
"Unexpected OpenMP directive");
15437 if (DKind == OMPD_metadirective) {
15438 CaptureRegion = OMPD_metadirective;
15439 }
else if (DKind == OMPD_unknown) {
15440 llvm_unreachable(
"Unknown OpenMP directive");
15442 llvm_unreachable(
"Unexpected OpenMP directive with when clause");
15445 case OMPC_firstprivate:
15446 case OMPC_lastprivate:
15447 case OMPC_reduction:
15448 case OMPC_task_reduction:
15449 case OMPC_in_reduction:
15452 case OMPC_proc_bind:
15456 case OMPC_allocator:
15457 case OMPC_collapse:
15462 case OMPC_copyprivate:
15466 case OMPC_mergeable:
15467 case OMPC_threadprivate:
15468 case OMPC_allocate:
15487 case OMPC_defaultmap:
15492 case OMPC_use_device_ptr:
15493 case OMPC_use_device_addr:
15494 case OMPC_is_device_ptr:
15495 case OMPC_unified_address:
15496 case OMPC_unified_shared_memory:
15497 case OMPC_reverse_offload:
15498 case OMPC_dynamic_allocators:
15499 case OMPC_atomic_default_mem_order:
15500 case OMPC_device_type:
15502 case OMPC_nontemporal:
15506 case OMPC_inclusive:
15507 case OMPC_exclusive:
15508 case OMPC_uses_allocators:
15509 case OMPC_affinity:
15512 llvm_unreachable(
"Unexpected OpenMP clause.");
15514 return CaptureRegion;
15524 Stmt *HelperValStmt =
nullptr;
15527 !
Condition->isInstantiationDependent() &&
15528 !
Condition->containsUnexpandedParameterPack()) {
15533 ValExpr = Val.
get();
15537 DKind, OMPC_if,
LangOpts.OpenMP, NameModifier);
15540 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
15541 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
15547 OMPIfClause(NameModifier, ValExpr, HelperValStmt, CaptureRegion, StartLoc,
15548 LParenLoc, NameModifierLoc, ColonLoc, EndLoc);
15556 Stmt *HelperValStmt =
nullptr;
15559 !
Condition->isInstantiationDependent() &&
15560 !
Condition->containsUnexpandedParameterPack()) {
15572 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
15573 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
15579 StartLoc, LParenLoc, EndLoc);
15589 IntConvertDiagnoser()
15593 return S.
Diag(Loc, diag::err_omp_not_integral) << T;
15597 return S.
Diag(Loc, diag::err_omp_incomplete_type) << T;
15602 return S.
Diag(Loc, diag::err_omp_explicit_conversion) << T << ConvTy;
15611 return S.
Diag(Loc, diag::err_omp_ambiguous_conversion) << T;
15620 llvm_unreachable(
"conversion functions are permitted");
15622 } ConvertDiagnoser;
15628 bool StrictlyPositive,
bool BuildCapture =
false,
15631 Stmt **HelperValStmt =
nullptr) {
15637 if (
Value.isInvalid())
15640 ValExpr =
Value.get();
15644 if (Result->isSigned() &&
15645 !((!StrictlyPositive && Result->isNonNegative()) ||
15646 (StrictlyPositive && Result->isStrictlyPositive()))) {
15647 SemaRef.
Diag(Loc, diag::err_omp_negative_expression_in_clause)
15648 << getOpenMPClauseName(CKind) << (StrictlyPositive ? 1 : 0)
15657 if (*CaptureRegion != OMPD_unknown &&
15660 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
15661 ValExpr = tryBuildCapture(SemaRef, ValExpr, Captures).get();
15672 Expr *ValExpr = NumThreads;
15673 Stmt *HelperValStmt =
nullptr;
15686 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
15687 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
15692 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
15697 bool StrictlyPositive,
15698 bool SuppressExprDiags) {
15707 if (SuppressExprDiags) {
15711 SuppressedDiagnoser() : VerifyICEDiagnoser(
true) {}
15714 llvm_unreachable(
"Diagnostic suppressed");
15724 if ((StrictlyPositive && !Result.isStrictlyPositive()) ||
15725 (!StrictlyPositive && !Result.isNonNegative())) {
15726 Diag(E->
getExprLoc(), diag::err_omp_negative_expression_in_clause)
15727 << getOpenMPClauseName(CKind) << (StrictlyPositive ? 1 : 0)
15731 if ((CKind == OMPC_aligned || CKind == OMPC_align) && !Result.isPowerOf2()) {
15732 Diag(E->
getExprLoc(), diag::warn_omp_alignment_not_power_of_two)
15736 if (CKind == OMPC_collapse &&
DSAStack->getAssociatedLoops() == 1)
15737 DSAStack->setAssociatedLoops(Result.getExtValue());
15738 else if (CKind == OMPC_ordered)
15739 DSAStack->setAssociatedLoops(Result.getExtValue());
15749 ExprResult Safelen = VerifyPositiveIntegerConstantInClause(Len, OMPC_safelen);
15762 ExprResult Simdlen = VerifyPositiveIntegerConstantInClause(Len, OMPC_simdlen);
15771 DSAStackTy *Stack) {
15772 QualType OMPAllocatorHandleT = Stack->getOMPAllocatorHandleT();
15773 if (!OMPAllocatorHandleT.
isNull())
15776 bool ErrorFound =
false;
15777 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
15778 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
15779 StringRef Allocator =
15780 OMPAllocateDeclAttr::ConvertAllocatorTypeTyToStr(AllocatorKind);
15782 auto *VD = dyn_cast_or_null<ValueDecl>(
15795 if (OMPAllocatorHandleT.
isNull())
15796 OMPAllocatorHandleT = AllocatorType;
15801 Stack->setAllocator(AllocatorKind, Res.
get());
15804 S.
Diag(Loc, diag::err_omp_implied_type_not_found)
15805 <<
"omp_allocator_handle_t";
15809 Stack->setOMPAllocatorHandleT(OMPAllocatorHandleT);
15822 if (Allocator.isInvalid())
15825 DSAStack->getOMPAllocatorHandleT(),
15828 if (Allocator.isInvalid())
15844 VerifyPositiveIntegerConstantInClause(NumForLoops, OMPC_collapse);
15854 Expr *NumForLoops) {
15860 if (NumForLoops && LParenLoc.
isValid()) {
15862 VerifyPositiveIntegerConstantInClause(NumForLoops, OMPC_ordered);
15865 NumForLoops = NumForLoopsResult.
get();
15867 NumForLoops =
nullptr;
15870 Context, NumForLoops, NumForLoops ?
DSAStack->getAssociatedLoops() : 0,
15871 StartLoc, LParenLoc, EndLoc);
15872 DSAStack->setOrderedRegion(
true, NumForLoops, Clause);
15883 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
15885 case OMPC_proc_bind:
15887 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
15889 case OMPC_atomic_default_mem_order:
15892 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
15896 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
15900 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
15904 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
15908 case OMPC_num_threads:
15912 case OMPC_allocator:
15913 case OMPC_collapse:
15914 case OMPC_schedule:
15916 case OMPC_firstprivate:
15917 case OMPC_lastprivate:
15919 case OMPC_reduction:
15920 case OMPC_task_reduction:
15921 case OMPC_in_reduction:
15925 case OMPC_copyprivate:
15929 case OMPC_mergeable:
15930 case OMPC_threadprivate:
15931 case OMPC_allocate:
15948 case OMPC_num_teams:
15949 case OMPC_thread_limit:
15950 case OMPC_priority:
15951 case OMPC_grainsize:
15953 case OMPC_num_tasks:
15955 case OMPC_dist_schedule:
15956 case OMPC_defaultmap:
15961 case OMPC_use_device_ptr:
15962 case OMPC_use_device_addr:
15963 case OMPC_is_device_ptr:
15964 case OMPC_unified_address:
15965 case OMPC_unified_shared_memory:
15966 case OMPC_reverse_offload:
15967 case OMPC_dynamic_allocators:
15968 case OMPC_device_type:
15970 case OMPC_nontemporal:
15972 case OMPC_novariants:
15973 case OMPC_nocontext:
15975 case OMPC_inclusive:
15976 case OMPC_exclusive:
15977 case OMPC_uses_allocators:
15978 case OMPC_affinity:
15981 llvm_unreachable(
"Clause is not allowed.");
15990 llvm::raw_svector_ostream Out(Buffer);
15991 unsigned Skipped = Exclude.size();
15992 auto S = Exclude.begin(), E = Exclude.end();
15993 for (
unsigned I =
First; I <
Last; ++I) {
15994 if (std::find(S, E, I) != E) {
15999 if (I + Skipped + 2 ==
Last)
16001 else if (I + Skipped + 1 !=
Last)
16012 if (
Kind == OMP_DEFAULT_unknown) {
16013 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16015 unsigned(OMP_DEFAULT_unknown))
16016 << getOpenMPClauseName(OMPC_default);
16021 case OMP_DEFAULT_none:
16022 DSAStack->setDefaultDSANone(KindKwLoc);
16024 case OMP_DEFAULT_shared:
16025 DSAStack->setDefaultDSAShared(KindKwLoc);
16027 case OMP_DEFAULT_firstprivate:
16028 DSAStack->setDefaultDSAFirstPrivate(KindKwLoc);
16031 llvm_unreachable(
"DSA unexpected in OpenMP default clause");
16043 if (
Kind == OMP_PROC_BIND_unknown) {
16044 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16046 unsigned(OMP_PROC_BIND_master),
16049 ? OMP_PROC_BIND_primary
16050 : OMP_PROC_BIND_spread) +
16052 << getOpenMPClauseName(OMPC_proc_bind);
16055 if (
Kind == OMP_PROC_BIND_primary &&
LangOpts.OpenMP < 51)
16056 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16058 unsigned(OMP_PROC_BIND_master),
16060 unsigned(OMP_PROC_BIND_spread) + 1)
16061 << getOpenMPClauseName(OMPC_proc_bind);
16070 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16072 OMPC_atomic_default_mem_order, 0,
16074 << getOpenMPClauseName(OMPC_atomic_default_mem_order);
16078 LParenLoc, EndLoc);
16088 "OMPC_ORDER_unknown not greater than 0");
16089 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16092 << getOpenMPClauseName(OMPC_order);
16105 Kind == OMPC_DEPEND_sink ||
Kind == OMPC_DEPEND_depobj) {
16107 OMPC_DEPEND_depobj};
16109 Except.push_back(OMPC_DEPEND_inoutset);
16110 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16113 << getOpenMPClauseName(OMPC_update);
16124 for (
Expr *SizeExpr : SizeExprs) {
16125 ExprResult NumForLoopsResult = VerifyPositiveIntegerConstantInClause(
16126 SizeExpr, OMPC_sizes,
true);
16127 if (!NumForLoopsResult.
isUsable())
16131 DSAStack->setAssociatedLoops(SizeExprs.size());
16148 ExprResult FactorResult = VerifyPositiveIntegerConstantInClause(
16149 FactorExpr, OMPC_partial,
true);
16152 FactorExpr = FactorResult.
get();
16163 AlignVal = VerifyPositiveIntegerConstantInClause(A, OMPC_align);
16177 case OMPC_schedule:
16178 enum { Modifier1, Modifier2, ScheduleKind, NumberOfElements };
16179 assert(Argument.size() == NumberOfElements &&
16180 ArgumentLoc.size() == NumberOfElements);
16185 StartLoc, LParenLoc, ArgumentLoc[Modifier1], ArgumentLoc[Modifier2],
16186 ArgumentLoc[ScheduleKind], DelimLoc, EndLoc);
16189 assert(Argument.size() == 1 && ArgumentLoc.size() == 1);
16191 Expr, StartLoc, LParenLoc, ArgumentLoc.back(),
16194 case OMPC_dist_schedule:
16197 StartLoc, LParenLoc, ArgumentLoc.back(), DelimLoc, EndLoc);
16199 case OMPC_defaultmap:
16200 enum { Modifier, DefaultmapKind };
16204 StartLoc, LParenLoc, ArgumentLoc[Modifier], ArgumentLoc[DefaultmapKind],
16208 assert(Argument.size() == 1 && ArgumentLoc.size() == 1);
16211 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
16214 case OMPC_num_threads:
16218 case OMPC_allocator:
16219 case OMPC_collapse:
16221 case OMPC_proc_bind:
16223 case OMPC_firstprivate:
16224 case OMPC_lastprivate:
16226 case OMPC_reduction:
16227 case OMPC_task_reduction:
16228 case OMPC_in_reduction:
16232 case OMPC_copyprivate:
16236 case OMPC_mergeable:
16237 case OMPC_threadprivate:
16238 case OMPC_allocate:
16255 case OMPC_num_teams:
16256 case OMPC_thread_limit:
16257 case OMPC_priority:
16258 case OMPC_grainsize:
16260 case OMPC_num_tasks:
16266 case OMPC_use_device_ptr:
16267 case OMPC_use_device_addr:
16268 case OMPC_is_device_ptr:
16269 case OMPC_unified_address:
16270 case OMPC_unified_shared_memory:
16271 case OMPC_reverse_offload:
16272 case OMPC_dynamic_allocators:
16273 case OMPC_atomic_default_mem_order:
16274 case OMPC_device_type:
16276 case OMPC_nontemporal:
16279 case OMPC_novariants:
16280 case OMPC_nocontext:
16282 case OMPC_inclusive:
16283 case OMPC_exclusive:
16284 case OMPC_uses_allocators:
16285 case OMPC_affinity:
16289 llvm_unreachable(
"Clause is not allowed.");
16300 Excluded.push_back(M2);
16301 if (M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic)
16302 Excluded.push_back(OMPC_SCHEDULE_MODIFIER_monotonic);
16303 if (M2 == OMPC_SCHEDULE_MODIFIER_monotonic)
16304 Excluded.push_back(OMPC_SCHEDULE_MODIFIER_nonmonotonic);
16305 S.
Diag(M1Loc, diag::err_omp_unexpected_clause_value)
16310 << getOpenMPClauseName(OMPC_schedule);
16328 (M1 == OMPC_SCHEDULE_MODIFIER_monotonic &&
16329 M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic) ||
16330 (M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic &&
16331 M2 == OMPC_SCHEDULE_MODIFIER_monotonic)) {
16332 Diag(M2Loc, diag::err_omp_unexpected_schedule_modifier)
16348 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
16349 << Values << getOpenMPClauseName(OMPC_schedule);
16357 (M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic ||
16358 M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
16359 Kind != OMPC_SCHEDULE_dynamic &&
Kind != OMPC_SCHEDULE_guided) {
16360 Diag(M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic ? M1Loc : M2Loc,
16361 diag::err_omp_schedule_nonmonotonic_static);
16364 Expr *ValExpr = ChunkSize;
16365 Stmt *HelperValStmt =
nullptr;
16376 ValExpr = Val.
get();
16383 if (Result->isSigned() && !Result->isStrictlyPositive()) {
16384 Diag(ChunkSizeLoc, diag::err_omp_negative_expression_in_clause)
16389 DSAStack->getCurrentDirective(), OMPC_schedule,
16390 LangOpts.OpenMP) != OMPD_unknown &&
16393 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16394 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16402 ValExpr, HelperValStmt, M1, M1Loc, M2, M2Loc);
16419 case OMPC_mergeable:
16461 case OMPC_unified_address:
16464 case OMPC_unified_shared_memory:
16467 case OMPC_reverse_offload:
16470 case OMPC_dynamic_allocators:
16486 case OMPC_num_threads:
16490 case OMPC_allocator:
16491 case OMPC_collapse:
16492 case OMPC_schedule:
16494 case OMPC_firstprivate:
16495 case OMPC_lastprivate:
16497 case OMPC_reduction:
16498 case OMPC_task_reduction:
16499 case OMPC_in_reduction:
16503 case OMPC_copyprivate:
16505 case OMPC_proc_bind:
16506 case OMPC_threadprivate:
16507 case OMPC_allocate:
16513 case OMPC_num_teams:
16514 case OMPC_thread_limit:
16515 case OMPC_priority:
16516 case OMPC_grainsize:
16517 case OMPC_num_tasks:
16519 case OMPC_dist_schedule:
16520 case OMPC_defaultmap:
16525 case OMPC_use_device_ptr:
16526 case OMPC_use_device_addr:
16527 case OMPC_is_device_ptr:
16528 case OMPC_atomic_default_mem_order:
16529 case OMPC_device_type:
16531 case OMPC_nontemporal:
16533 case OMPC_novariants:
16534 case OMPC_nocontext:
16536 case OMPC_inclusive:
16537 case OMPC_exclusive:
16538 case OMPC_uses_allocators:
16539 case OMPC_affinity:
16542 llvm_unreachable(
"Clause is not allowed.");
16655 if (!
hasClauses(Clauses, OMPC_init, OMPC_use, OMPC_destroy, OMPC_nowait)) {
16656 StringRef
Expected =
"'init', 'use', 'destroy', or 'nowait'";
16657 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
16658 <<
Expected << getOpenMPDirectiveName(OMPD_interop);
16671 bool HasInitClause =
false;
16672 bool IsTargetSync =
false;
16676 if (
const auto *InitClause = dyn_cast<OMPInitClause>(C)) {
16677 HasInitClause =
true;
16678 if (InitClause->getIsTargetSync())
16679 IsTargetSync =
true;
16680 }
else if (
const auto *DC = dyn_cast<OMPDependClause>(C)) {
16684 if (DependClause && HasInitClause && !IsTargetSync) {
16685 Diag(DependClause->
getBeginLoc(), diag::err_omp_interop_bad_depend_clause);
16698 if (ClauseKind == OMPC_init) {
16699 const auto *IC = cast<OMPInitClause>(C);
16700 VarLoc = IC->getVarLoc();
16701 DRE = dyn_cast_or_null<DeclRefExpr>(IC->getInteropVar());
16702 }
else if (ClauseKind == OMPC_use) {
16703 const auto *UC = cast<OMPUseClause>(C);
16704 VarLoc = UC->getVarLoc();
16705 DRE = dyn_cast_or_null<DeclRefExpr>(UC->getInteropVar());
16706 }
else if (ClauseKind == OMPC_destroy) {
16707 const auto *DC = cast<OMPDestroyClause>(C);
16708 VarLoc = DC->getVarLoc();
16709 DRE = dyn_cast_or_null<DeclRefExpr>(DC->getInteropVar());
16715 if (
const auto *VD = dyn_cast<VarDecl>(DRE->
getDecl())) {
16717 Diag(VarLoc, diag::err_omp_interop_var_multiple_actions) << VD;
16734 const auto *DRE = dyn_cast<DeclRefExpr>(InteropVarExpr);
16735 if (!DRE || !isa<VarDecl>(DRE->getDecl())) {
16736 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_expected) << 0;
16741 bool HasError =
false;
16747 if (
const auto *TD = dyn_cast<TypeDecl>(ND)) {
16748 InteropType =
QualType(TD->getTypeForDecl(), 0);
16757 SemaRef.
Diag(VarLoc, diag::err_omp_implied_type_not_found)
16758 <<
"omp_interop_t";
16764 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_wrong_type);
16770 if ((
Kind == OMPC_init ||
Kind == OMPC_destroy) &&
16772 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_expected)
16781 bool IsTarget,
bool IsTargetSync,
16790 for (
const Expr *E : PrefExprs) {
16796 if (isa<StringLiteral>(E))
16803 IsTargetSync, StartLoc, LParenLoc, VarLoc,
16816 OMPUseClause(InteropVar, StartLoc, LParenLoc, VarLoc, EndLoc);
16837 Stmt *HelperValStmt =
nullptr;
16840 !
Condition->isInstantiationDependent() &&
16841 !
Condition->containsUnexpandedParameterPack()) {
16853 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16854 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16860 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
16868 Stmt *HelperValStmt =
nullptr;
16871 !
Condition->isInstantiationDependent() &&
16872 !
Condition->containsUnexpandedParameterPack()) {
16884 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16885 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16891 StartLoc, LParenLoc, EndLoc);
16898 Expr *ValExpr = ThreadID;
16899 Stmt *HelperValStmt =
nullptr;
16906 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16907 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16912 StartLoc, LParenLoc, EndLoc);
16933 case OMPC_firstprivate:
16936 case OMPC_lastprivate:
16938 "Unexpected lastprivate modifier.");
16941 ExtraModifierLoc, ColonLoc, StartLoc, LParenLoc, EndLoc);
16946 case OMPC_reduction:
16948 "Unexpected lastprivate modifier.");
16951 StartLoc, LParenLoc, ExtraModifierLoc, ColonLoc, EndLoc,
16952 ReductionOrMapperIdScopeSpec, ReductionOrMapperId);
16954 case OMPC_task_reduction:
16956 EndLoc, ReductionOrMapperIdScopeSpec,
16957 ReductionOrMapperId);
16959 case OMPC_in_reduction:
16961 EndLoc, ReductionOrMapperIdScopeSpec,
16962 ReductionOrMapperId);
16966 "Unexpected linear modifier.");
16968 VarList, DepModOrTailExpr, StartLoc, LParenLoc,
16974 LParenLoc, ColonLoc, EndLoc);
16979 case OMPC_copyprivate:
16987 "Unexpected depend modifier.");
16990 ExtraModifierLoc, ColonLoc, VarList, StartLoc, LParenLoc, EndLoc);
16994 "Unexpected map modifier.");
16996 MapTypeModifiers, MapTypeModifiersLoc, ReductionOrMapperIdScopeSpec,
16998 IsMapTypeImplicit, ExtraModifierLoc, ColonLoc, VarList, Locs);
17002 ReductionOrMapperIdScopeSpec, ReductionOrMapperId,
17003 ColonLoc, VarList, Locs);
17007 ReductionOrMapperIdScopeSpec,
17008 ReductionOrMapperId, ColonLoc, VarList, Locs);
17010 case OMPC_use_device_ptr:
17013 case OMPC_use_device_addr:
17016 case OMPC_is_device_ptr:
17019 case OMPC_allocate:
17021 LParenLoc, ColonLoc, EndLoc);
17023 case OMPC_nontemporal:
17026 case OMPC_inclusive:
17029 case OMPC_exclusive:
17032 case OMPC_affinity:
17034 DepModOrTailExpr, VarList);
17039 case OMPC_num_threads:
17043 case OMPC_allocator:
17044 case OMPC_collapse:
17046 case OMPC_proc_bind:
17047 case OMPC_schedule:
17051 case OMPC_mergeable:
17052 case OMPC_threadprivate:
17066 case OMPC_num_teams:
17067 case OMPC_thread_limit:
17068 case OMPC_priority:
17069 case OMPC_grainsize:
17071 case OMPC_num_tasks:
17073 case OMPC_dist_schedule:
17074 case OMPC_defaultmap:
17077 case OMPC_unified_address:
17078 case OMPC_unified_shared_memory:
17079 case OMPC_reverse_offload:
17080 case OMPC_dynamic_allocators:
17081 case OMPC_atomic_default_mem_order:
17082 case OMPC_device_type:
17086 case OMPC_novariants:
17087 case OMPC_nocontext:
17089 case OMPC_uses_allocators:
17093 llvm_unreachable(
"Clause is not allowed.");
17123 for (
Expr *RefExpr : VarList) {
17124 assert(RefExpr &&
"NULL expr in OpenMP private clause.");
17127 Expr *SimpleRefExpr = RefExpr;
17131 Vars.push_back(RefExpr);
17132 PrivateCopies.push_back(
nullptr);
17139 auto *VD = dyn_cast<VarDecl>(D);
17146 Type =
Type.getNonReferenceType();
17166 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
17167 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_private) {
17168 Diag(ELoc, diag::err_omp_wrong_dsa) << getOpenMPClauseName(DVar.CKind)
17169 << getOpenMPClauseName(OMPC_private);
17178 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
17179 << getOpenMPClauseName(OMPC_private) <<
Type
17180 << getOpenMPDirectiveName(CurrDir);
17181 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
17184 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
17198 CurrDir == OMPD_target) {
17200 if (
DSAStack->checkMappableExprComponentListsForDecl(
17204 ConflictKind = WhereFoundClauseKind;
17207 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
17208 << getOpenMPClauseName(OMPC_private)
17209 << getOpenMPClauseName(ConflictKind)
17210 << getOpenMPDirectiveName(CurrDir);
17229 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
17239 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_private, Ref);
17241 ? RefExpr->IgnoreParens()
17243 PrivateCopies.push_back(VDPrivateRefExpr);
17261 bool IsImplicitClause =
17265 for (
Expr *RefExpr : VarList) {
17266 assert(RefExpr &&
"NULL expr in OpenMP firstprivate clause.");
17269 Expr *SimpleRefExpr = RefExpr;
17273 Vars.push_back(RefExpr);
17274 PrivateCopies.push_back(
nullptr);
17275 Inits.push_back(
nullptr);
17281 ELoc = IsImplicitClause ? ImplicitClauseLoc : ELoc;
17283 auto *VD = dyn_cast<VarDecl>(D);
17289 diag::err_omp_firstprivate_incomplete_type))
17291 Type =
Type.getNonReferenceType();
17300 DSAStackTy::DSAVarData TopDVar;
17301 if (!IsImplicitClause) {
17302 DSAStackTy::DSAVarData DVar =
17314 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_firstprivate &&
17316 DVar.CKind != OMPC_lastprivate) &&
17318 Diag(ELoc, diag::err_omp_wrong_dsa)
17319 << getOpenMPClauseName(DVar.CKind)
17320 << getOpenMPClauseName(OMPC_firstprivate);
17336 if (!(IsConstant || (VD && VD->isStaticDataMember())) && !DVar.RefExpr &&
17337 DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_shared) {
17338 Diag(ELoc, diag::err_omp_wrong_dsa)
17339 << getOpenMPClauseName(DVar.CKind)
17340 << getOpenMPClauseName(OMPC_firstprivate);
17364 DVar =
DSAStack->getImplicitDSA(D,
true);
17365 if (DVar.CKind != OMPC_shared &&
17368 DVar.DKind == OMPD_unknown)) {
17369 Diag(ELoc, diag::err_omp_required_access)
17370 << getOpenMPClauseName(OMPC_firstprivate)
17371 << getOpenMPClauseName(OMPC_shared);
17391 return C == OMPC_reduction && !AppliedToPointee;
17399 if (DVar.CKind == OMPC_reduction &&
17403 Diag(ELoc, diag::err_omp_parallel_reduction_in_task_firstprivate)
17404 << getOpenMPDirectiveName(DVar.DKind);
17420 CurrDir == OMPD_target) {
17422 if (
DSAStack->checkMappableExprComponentListsForDecl(
17427 ConflictKind = WhereFoundClauseKind;
17430 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
17431 << getOpenMPClauseName(OMPC_firstprivate)
17432 << getOpenMPClauseName(ConflictKind)
17433 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
17443 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
17444 << getOpenMPClauseName(OMPC_firstprivate) <<
Type
17445 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
17446 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
17449 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
17458 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
17464 Expr *VDInitRefExpr =
nullptr;
17474 ".firstprivate.temp");
17481 if (Result.isInvalid())
17489 ".firstprivate.temp");
17491 RefExpr->getExprLoc());
17497 if (IsImplicitClause) {
17498 Diag(RefExpr->getExprLoc(),
17499 diag::note_omp_task_predetermined_firstprivate_here);
17506 RefExpr->getExprLoc());
17509 if (TopDVar.CKind == OMPC_lastprivate) {
17510 Ref = TopDVar.PrivateCopy;
17514 ExprCaptures.push_back(Ref->
getDecl());
17517 if (!IsImplicitClause)
17518 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
17520 ? RefExpr->IgnoreParens()
17522 PrivateCopies.push_back(VDPrivateRefExpr);
17523 Inits.push_back(VDInitRefExpr);
17530 Vars, PrivateCopies, Inits,
17539 assert(ColonLoc.
isValid() &&
"Colon location must be valid.");
17540 Diag(LPKindLoc, diag::err_omp_unexpected_clause_value)
17543 << getOpenMPClauseName(OMPC_lastprivate);
17553 for (
Expr *RefExpr : VarList) {
17554 assert(RefExpr &&
"NULL expr in OpenMP lastprivate clause.");
17557 Expr *SimpleRefExpr = RefExpr;
17561 Vars.push_back(RefExpr);
17562 SrcExprs.push_back(
nullptr);
17563 DstExprs.push_back(
nullptr);
17564 AssignmentOps.push_back(
nullptr);
17571 auto *VD = dyn_cast<VarDecl>(D);
17577 diag::err_omp_lastprivate_incomplete_type))
17579 Type =
Type.getNonReferenceType();
17596 Diag(ELoc, diag::err_omp_lastprivate_conditional_non_scalar);
17597 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
17600 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
17614 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
17615 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_lastprivate &&
17617 DVar.CKind != OMPC_firstprivate) &&
17618 (DVar.CKind != OMPC_private || DVar.RefExpr !=
nullptr)) {
17619 Diag(ELoc, diag::err_omp_wrong_dsa)
17620 << getOpenMPClauseName(DVar.CKind)
17621 << getOpenMPClauseName(OMPC_lastprivate);
17632 DSAStackTy::DSAVarData TopDVar = DVar;
17636 DVar =
DSAStack->getImplicitDSA(D,
true);
17637 if (DVar.CKind != OMPC_shared) {
17638 Diag(ELoc, diag::err_omp_required_access)
17639 << getOpenMPClauseName(OMPC_lastprivate)
17640 << getOpenMPClauseName(OMPC_shared);
17656 Type.getUnqualifiedType(),
".lastprivate.src",
17667 PseudoDstExpr, PseudoSrcExpr);
17677 if (TopDVar.CKind == OMPC_firstprivate) {
17678 Ref = TopDVar.PrivateCopy;
17682 ExprCaptures.push_back(Ref->
getDecl());
17684 if ((TopDVar.CKind == OMPC_firstprivate && !TopDVar.PrivateCopy) ||
17695 ExprPostUpdates.push_back(
17699 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_lastprivate, Ref);
17701 ? RefExpr->IgnoreParens()
17703 SrcExprs.push_back(PseudoSrcExpr);
17704 DstExprs.push_back(PseudoDstExpr);
17705 AssignmentOps.push_back(AssignmentOp.
get());
17712 Vars, SrcExprs, DstExprs, AssignmentOps,
17713 LPKind, LPKindLoc, ColonLoc,
17723 for (
Expr *RefExpr : VarList) {
17724 assert(RefExpr &&
"NULL expr in OpenMP lastprivate clause.");
17727 Expr *SimpleRefExpr = RefExpr;
17731 Vars.push_back(RefExpr);
17737 auto *VD = dyn_cast<VarDecl>(D);
17745 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
17746 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_shared &&
17748 Diag(ELoc, diag::err_omp_wrong_dsa) << getOpenMPClauseName(DVar.CKind)
17749 << getOpenMPClauseName(OMPC_shared);
17757 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_shared, Ref);
17759 ? RefExpr->IgnoreParens()
17770 class DSARefChecker :
public StmtVisitor<DSARefChecker, bool> {
17775 if (
auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
17776 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
17777 if (DVar.CKind == OMPC_shared && !DVar.RefExpr)
17779 if (DVar.CKind != OMPC_unknown)
17781 DSAStackTy::DSAVarData DVarPrivate = Stack->hasDSA(
17788 return DVarPrivate.CKind != OMPC_unknown;
17792 bool VisitStmt(
Stmt *S) {
17793 for (
Stmt *Child : S->children()) {
17794 if (Child && Visit(Child))
17799 explicit DSARefChecker(DSAStackTy *S) : Stack(S) {}
17806 class TransformExprToCaptures :
public TreeTransform<TransformExprToCaptures> {
17813 : BaseTransform(SemaRef),
Field(
FieldDecl), CapturedExpr(nullptr) {}
17818 CapturedExpr =
buildCapture(SemaRef, Field, E,
false);
17819 return CapturedExpr;
17821 return BaseTransform::TransformMemberExpr(E);
17823 DeclRefExpr *getCapturedExpr() {
return CapturedExpr; }
17827 template <
typename T,
typename U>
17830 for (
U &Set : Lookups) {
17831 for (
auto *D : Set) {
17832 if (T Res = Gen(cast<ValueDecl>(D)))
17842 for (
auto RD : D->
redecls()) {
17847 auto ND = cast<NamedDecl>(RD);
17865 AssociatedClasses);
17878 for (
auto *NS : AssociatedNamespaces) {
17891 for (
auto *D : R) {
17892 auto *Underlying = D;
17893 if (
auto *USD = dyn_cast<UsingShadowDecl>(D))
17894 Underlying = USD->getTargetDecl();
17896 if (!isa<OMPDeclareReductionDecl>(Underlying) &&
17897 !isa<OMPDeclareMapperDecl>(Underlying))
17904 if (
auto *USD = dyn_cast<UsingShadowDecl>(D))
17905 Underlying = USD->getTargetDecl();
17907 Lookups.emplace_back();
17908 Lookups.back().addDecl(Underlying);
17927 S = S->getParent();
17928 }
while (S && !S->isDeclScope(D));
17930 S = S->getParent();
17931 Lookups.emplace_back();
17932 Lookups.back().append(Lookup.
begin(), Lookup.
end());
17935 }
else if (
auto *ULE =
17936 cast_or_null<UnresolvedLookupExpr>(UnresolvedReduction)) {
17938 Decl *PrevD =
nullptr;
17942 else if (
auto *DRD = dyn_cast<OMPDeclareReductionDecl>(D))
17943 Lookups.back().addDecl(DRD);
17950 filterLookupForUDReductionAndMapper<bool>(Lookups, [](
ValueDecl *D) {
17951 return !D->isInvalidDecl() &&
17952 (D->getType()->isDependentType() ||
17953 D->getType()->isInstantiationDependentType() ||
17954 D->getType()->containsUnexpandedParameterPack());
17960 ResSet.
append(Set.begin(), Set.end());
17962 ResSet.
addDecl(Set[Set.size() - 1]);
17967 true,
true, ResSet.
begin(), ResSet.
end());
17987 if (SemaRef.
isCompleteType(Loc, Ty) || TyRec->isBeingDefined() ||
17988 TyRec->getDecl()->getDefinition()) {
17991 if (Lookup.
empty()) {
17992 Lookups.emplace_back();
17993 Lookups.back().append(Lookup.
begin(), Lookup.
end());
18000 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
18010 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
18024 Loc, VD->
getType(), Ty, Paths.front(),
18034 if (ReductionIdScopeSpec.
isSet()) {
18035 SemaRef.
Diag(Loc, diag::err_omp_not_resolved_reduction_identifier)
18044 struct ReductionData {
18069 unsigned RedModifier = 0;
18070 ReductionData() =
delete;
18072 ReductionData(
unsigned Size,
unsigned Modifier = 0) : RedModifier(Modifier) {
18073 Vars.reserve(Size);
18074 Privates.reserve(Size);
18075 LHSs.reserve(Size);
18076 RHSs.reserve(Size);
18077 ReductionOps.reserve(Size);
18078 if (RedModifier == OMPC_REDUCTION_inscan) {
18079 InscanCopyOps.reserve(Size);
18080 InscanCopyArrayTemps.reserve(Size);
18081 InscanCopyArrayElems.reserve(Size);
18083 TaskgroupDescriptors.reserve(Size);
18084 ExprCaptures.reserve(Size);
18085 ExprPostUpdates.reserve(Size);
18089 void push(
Expr *Item,
Expr *ReductionOp) {
18090 Vars.emplace_back(Item);
18091 Privates.emplace_back(
nullptr);
18092 LHSs.emplace_back(
nullptr);
18093 RHSs.emplace_back(
nullptr);
18094 ReductionOps.emplace_back(ReductionOp);
18095 TaskgroupDescriptors.emplace_back(
nullptr);
18096 if (RedModifier == OMPC_REDUCTION_inscan) {
18097 InscanCopyOps.push_back(
nullptr);
18098 InscanCopyArrayTemps.push_back(
nullptr);
18099 InscanCopyArrayElems.push_back(
nullptr);
18104 Expr *TaskgroupDescriptor,
Expr *CopyOp,
Expr *CopyArrayTemp,
18105 Expr *CopyArrayElem) {
18106 Vars.emplace_back(Item);
18107 Privates.emplace_back(Private);
18108 LHSs.emplace_back(LHS);
18109 RHSs.emplace_back(RHS);
18110 ReductionOps.emplace_back(ReductionOp);
18111 TaskgroupDescriptors.emplace_back(TaskgroupDescriptor);
18112 if (RedModifier == OMPC_REDUCTION_inscan) {
18113 InscanCopyOps.push_back(CopyOp);
18114 InscanCopyArrayTemps.push_back(CopyArrayTemp);
18115 InscanCopyArrayElems.push_back(CopyArrayElem);
18117 assert(CopyOp ==
nullptr && CopyArrayTemp ==
nullptr &&
18118 CopyArrayElem ==
nullptr &&
18119 "Copy operation must be used for inscan reductions only.");
18129 if (Length ==
nullptr) {
18136 SingleElement =
true;
18137 ArraySizes.push_back(llvm::APSInt::get(1));
18143 llvm::APSInt ConstantLengthValue = Result.Val.getInt();
18144 SingleElement = (ConstantLengthValue.getSExtValue() == 1);
18145 ArraySizes.push_back(ConstantLengthValue);
18153 while (
const auto *TempOASE = dyn_cast<OMPArraySectionExpr>(
Base)) {
18154 Length = TempOASE->getLength();
18155 if (Length ==
nullptr) {
18162 ArraySizes.push_back(llvm::APSInt::get(1));
18168 llvm::APSInt ConstantLengthValue = Result.Val.getInt();
18169 if (ConstantLengthValue.getSExtValue() != 1)
18172 ArraySizes.push_back(ConstantLengthValue);
18174 Base = TempOASE->getBase()->IgnoreParenImpCasts();
18178 if (!SingleElement) {
18179 while (
const auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base)) {
18181 ArraySizes.push_back(llvm::APSInt::get(1));
18182 Base = TempASE->getBase()->IgnoreParenImpCasts();
18194 return BO_AddAssign;
18196 return BO_MulAssign;
18198 return BO_AndAssign;
18200 return BO_OrAssign;
18202 return BO_XorAssign;
18250 case OO_Array_Delete:
18259 case OO_GreaterEqual:
18261 case OO_MinusEqual:
18263 case OO_SlashEqual:
18264 case OO_PercentEqual:
18265 case OO_CaretEqual:
18269 case OO_GreaterGreater:
18270 case OO_LessLessEqual:
18271 case OO_GreaterGreaterEqual:
18272 case OO_EqualEqual:
18273 case OO_ExclaimEqual:
18276 case OO_MinusMinus:
18282 case OO_Conditional:
18285 llvm_unreachable(
"Unexpected reduction identifier");
18288 if (II->isStr(
"max"))
18290 else if (II->isStr(
"min"))
18296 if (ReductionIdScopeSpec.
isValid())
18302 auto IR = UnresolvedReductions.begin(), ER = UnresolvedReductions.end();
18303 bool FirstIter =
true;
18304 for (
Expr *RefExpr : VarList) {
18305 assert(RefExpr &&
"nullptr expr in OpenMP reduction clause.");
18313 if (!FirstIter && IR != ER)
18318 Expr *SimpleRefExpr = RefExpr;
18327 S, ELoc, ERange, Stack->getCurScope(), ReductionIdScopeSpec,
18328 ReductionId,
Type, BasePath, IR == ER ?
nullptr : *IR);
18329 Expr *ReductionOp =
nullptr;
18331 (DeclareReductionRef.
isUnset() ||
18332 isa<UnresolvedLookupExpr>(DeclareReductionRef.
get())))
18333 ReductionOp = DeclareReductionRef.
get();
18335 RD.push(RefExpr, ReductionOp);
18341 Expr *TaskgroupDescriptor =
nullptr;
18343 auto *ASE = dyn_cast<ArraySubscriptExpr>(RefExpr->IgnoreParens());
18344 auto *OASE = dyn_cast<OMPArraySectionExpr>(RefExpr->IgnoreParens());
18346 Type = ASE->getType().getNonReferenceType();
18351 Type = ATy->getElementType();
18354 Type =
Type.getNonReferenceType();
18358 auto *VD = dyn_cast<VarDecl>(D);
18364 diag::err_omp_reduction_incomplete_type))
18370 false, ASE || OASE))
18377 if (!ASE && !OASE) {
18379 VarDecl *VDDef = VD->getDefinition();
18381 DSARefChecker Check(Stack);
18382 if (Check.Visit(VDDef->
getInit())) {
18383 S.
Diag(ELoc, diag::err_omp_reduction_ref_type_arg)
18384 << getOpenMPClauseName(ClauseKind) << ERange;
18402 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(D,
false);
18403 if (DVar.CKind == OMPC_reduction) {
18404 S.
Diag(ELoc, diag::err_omp_once_referenced)
18405 << getOpenMPClauseName(ClauseKind);
18407 S.
Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_referenced);
18410 if (DVar.CKind != OMPC_unknown) {
18411 S.
Diag(ELoc, diag::err_omp_wrong_dsa)
18412 << getOpenMPClauseName(DVar.CKind)
18413 << getOpenMPClauseName(OMPC_reduction);
18425 DVar = Stack->getImplicitDSA(D,
true);
18426 if (DVar.CKind != OMPC_shared) {
18427 S.
Diag(ELoc, diag::err_omp_required_access)
18428 << getOpenMPClauseName(OMPC_reduction)
18429 << getOpenMPClauseName(OMPC_shared);
18437 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(D,
false);
18438 if (DVar.CKind == OMPC_threadprivate) {
18439 S.
Diag(ELoc, diag::err_omp_wrong_dsa)
18440 << getOpenMPClauseName(DVar.CKind)
18441 << getOpenMPClauseName(OMPC_reduction);
18451 S, ELoc, ERange, Stack->getCurScope(), ReductionIdScopeSpec,
18452 ReductionId,
Type, BasePath, IR == ER ?
nullptr : *IR);
18456 (DeclareReductionRef.
isUnset() ||
18457 isa<UnresolvedLookupExpr>(DeclareReductionRef.
get()))) {
18458 RD.push(RefExpr, DeclareReductionRef.
get());
18461 if (BOK == BO_Comma && DeclareReductionRef.
isUnset()) {
18464 diag::err_omp_unknown_reduction_identifier)
18465 <<
Type << ReductionIdRange;
18477 if (DeclareReductionRef.
isUnset()) {
18478 if ((BOK == BO_GT || BOK == BO_LT) &&
18481 S.
Diag(ELoc, diag::err_omp_clause_not_arithmetic_type_arg)
18482 << getOpenMPClauseName(ClauseKind) << S.
getLangOpts().CPlusPlus;
18483 if (!ASE && !OASE) {
18484 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
18487 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18492 if ((BOK == BO_OrAssign || BOK == BO_AndAssign || BOK == BO_XorAssign) &&
18494 S.
Diag(ELoc, diag::err_omp_clause_floating_type_arg)
18495 << getOpenMPClauseName(ClauseKind);
18496 if (!ASE && !OASE) {
18497 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
18500 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18507 Type =
Type.getNonLValueExprType(Context).getUnqualifiedType();
18516 bool ConstantLengthOASE =
false;
18518 bool SingleElement;
18521 Context, OASE, SingleElement, ArraySizes);
18524 if (ConstantLengthOASE && !SingleElement) {
18532 if ((OASE && !ConstantLengthOASE) ||
18537 S.
Diag(ELoc, diag::err_omp_reduction_vla_unsupported) << !!OASE;
18538 S.
Diag(ELoc, diag::note_vla_unsupported);
18541 S.
targetDiag(ELoc, diag::err_omp_reduction_vla_unsupported) << !!OASE;
18542 S.
targetDiag(ELoc, diag::note_vla_unsupported);
18555 }
else if (!ASE && !OASE &&
18563 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
18565 Expr *Init =
nullptr;
18568 if (DeclareReductionRef.
isUsable()) {
18570 auto *DRD = cast<OMPDeclareReductionDecl>(DRDRef->getDecl());
18571 if (DRD->getInitializer()) {
18597 Type = ComplexTy->getElementType();
18599 llvm::APFloat InitValue = llvm::APFloat::getAllOnesValue(
18606 llvm::APInt InitValue = llvm::APInt::getAllOnes(Size);
18629 (BOK != BO_LT) ? IsSigned ? llvm::APInt::getSignedMinValue(Size)
18630 : llvm::APInt::getMinValue(Size)
18631 : IsSigned ? llvm::APInt::getSignedMaxValue(Size)
18632 : llvm::APInt::getMaxValue(Size);
18643 llvm::APFloat InitValue = llvm::APFloat::getLargest(
18674 llvm_unreachable(
"Unexpected reduction operation");
18677 if (Init && DeclareReductionRef.
isUnset()) {
18683 }
else if (!Init) {
18693 S.
Diag(ELoc, diag::err_omp_reduction_id_not_compatible)
18694 <<
Type << ReductionIdRange;
18695 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
18698 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18704 if (DeclareReductionRef.
isUsable()) {
18705 QualType RedTy = DeclareReductionRef.
get()->getType();
18709 if (!BasePath.empty()) {
18713 Context, PtrRedTy, CK_UncheckedDerivedToBase, LHS.
get(), &BasePath,
18716 Context, PtrRedTy, CK_UncheckedDerivedToBase, RHS.
get(), &BasePath,
18720 QualType Params[] = {PtrRedTy, PtrRedTy};
18735 CombBOK, LHSDRE, RHSDRE);
18742 if (BOK != BO_LT && BOK != BO_GT) {
18745 BO_Assign, LHSDRE, ReductionOp.
get());
18747 auto *ConditionalOp =
new (Context)
18752 BO_Assign, LHSDRE, ConditionalOp);
18765 ExprResult CopyOpRes, TempArrayRes, TempArrayElem;
18766 if (ClauseKind == OMPC_reduction &&
18767 RD.RedModifier == OMPC_REDUCTION_inscan) {
18769 CopyOpRes = S.
BuildBinOp(Stack->getCurScope(), ELoc, BO_Assign, LHSDRE,
18779 if (Stack->getCurrentDirective() == OMPD_simd ||
18817 if (ClauseKind == OMPC_in_reduction) {
18820 const Expr *ParentReductionOp =
nullptr;
18821 Expr *ParentBOKTD =
nullptr, *ParentReductionOpTD =
nullptr;
18822 DSAStackTy::DSAVarData ParentBOKDSA =
18823 Stack->getTopMostTaskgroupReductionData(D, ParentSR, ParentBOK,
18825 DSAStackTy::DSAVarData ParentReductionOpDSA =
18826 Stack->getTopMostTaskgroupReductionData(
18827 D, ParentSR, ParentReductionOp, ParentReductionOpTD);
18828 bool IsParentBOK = ParentBOKDSA.DKind != OMPD_unknown;
18829 bool IsParentReductionOp = ParentReductionOpDSA.DKind != OMPD_unknown;
18830 if ((DeclareReductionRef.
isUnset() && IsParentReductionOp) ||
18831 (DeclareReductionRef.
isUsable() && IsParentBOK) ||
18832 (IsParentBOK && BOK != ParentBOK) || IsParentReductionOp) {
18833 bool EmitError =
true;
18834 if (IsParentReductionOp && DeclareReductionRef.
isUsable()) {
18835 llvm::FoldingSetNodeID RedId, ParentRedId;
18836 ParentReductionOp->
Profile(ParentRedId, Context,
true);
18837 DeclareReductionRef.
get()->Profile(RedId, Context,
18839 EmitError = RedId != ParentRedId;
18843 diag::err_omp_reduction_identifier_mismatch)
18844 << ReductionIdRange << RefExpr->getSourceRange();
18846 diag::note_omp_previous_reduction_identifier)
18848 << (IsParentBOK ? ParentBOKDSA.RefExpr
18849 : ParentReductionOpDSA.RefExpr)
18850 ->getSourceRange();
18854 TaskgroupDescriptor = IsParentBOK ? ParentBOKTD : ParentReductionOpTD;
18861 TransformExprToCaptures RebuildToCapture(S, D);
18863 RebuildToCapture.TransformExpr(RefExpr->IgnoreParens()).get();
18864 Ref = RebuildToCapture.getCapturedExpr();
18866 VarsExpr = Ref =
buildCapture(S, D, SimpleRefExpr,
false);
18869 RD.ExprCaptures.emplace_back(Ref->
getDecl());
18875 S.
BuildBinOp(Stack->getCurScope(), ELoc, BO_Assign, SimpleRefExpr,
18880 Stack->getCurrentDirective() == OMPD_taskgroup) {
18881 S.
Diag(RefExpr->getExprLoc(),
18882 diag::err_omp_reduction_non_addressable_expression)
18883 << RefExpr->getSourceRange();
18886 RD.ExprPostUpdates.emplace_back(
18893 unsigned Modifier = RD.RedModifier;
18896 if (CurrDir == OMPD_taskgroup && ClauseKind == OMPC_task_reduction)
18897 Modifier = OMPC_REDUCTION_task;
18898 Stack->addDSA(D, RefExpr->IgnoreParens(), OMPC_reduction, Ref, Modifier,
18900 if (Modifier == OMPC_REDUCTION_task &&
18901 (CurrDir == OMPD_taskgroup ||
18905 if (DeclareReductionRef.
isUsable())
18906 Stack->addTaskgroupReductionData(D, ReductionIdRange,
18907 DeclareReductionRef.
get());
18909 Stack->addTaskgroupReductionData(D, ReductionIdRange, BOK);
18911 RD.push(VarsExpr, PrivateDRE, LHSDRE, RHSDRE, ReductionOp.
get(),
18912 TaskgroupDescriptor, CopyOpRes.
get(), TempArrayRes.
get(),
18913 TempArrayElem.
get());
18915 return RD.Vars.empty();
18925 Diag(LParenLoc, diag::err_omp_unexpected_clause_value)
18928 << getOpenMPClauseName(OMPC_reduction);
18936 if (Modifier == OMPC_REDUCTION_inscan &&
18937 (
DSAStack->getCurrentDirective() != OMPD_for &&
18938 DSAStack->getCurrentDirective() != OMPD_for_simd &&
18939 DSAStack->getCurrentDirective() != OMPD_simd &&
18940 DSAStack->getCurrentDirective() != OMPD_parallel_for &&
18941 DSAStack->getCurrentDirective() != OMPD_parallel_for_simd)) {
18942 Diag(ModifierLoc, diag::err_omp_wrong_inscan_reduction);
18946 ReductionData RD(VarList.size(), Modifier);
18948 StartLoc, LParenLoc, ColonLoc, EndLoc,
18949 ReductionIdScopeSpec, ReductionId,
18950 UnresolvedReductions, RD))
18954 Context, StartLoc, LParenLoc, ModifierLoc, ColonLoc, EndLoc, Modifier,
18956 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps, RD.InscanCopyOps,
18957 RD.InscanCopyArrayTemps, RD.InscanCopyArrayElems,
18967 ReductionData RD(VarList.size());
18969 StartLoc, LParenLoc, ColonLoc, EndLoc,
18970 ReductionIdScopeSpec, ReductionId,
18971 UnresolvedReductions, RD))
18975 Context, StartLoc, LParenLoc, ColonLoc, EndLoc, RD.Vars,
18977 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps,
18987 ReductionData RD(VarList.size());
18989 StartLoc, LParenLoc, ColonLoc, EndLoc,
18990 ReductionIdScopeSpec, ReductionId,
18991 UnresolvedReductions, RD))
18995 Context, StartLoc, LParenLoc, ColonLoc, EndLoc, RD.Vars,
18997 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps, RD.TaskgroupDescriptors,
19004 if ((!
LangOpts.CPlusPlus && LinKind != OMPC_LINEAR_val) ||
19006 Diag(LinLoc, diag::err_omp_wrong_linear_modifier) <<
LangOpts.CPlusPlus;
19014 bool IsDeclareSimd) {
19015 const auto *VD = dyn_cast_or_null<VarDecl>(D);
19019 if ((LinKind == OMPC_LINEAR_uval || LinKind == OMPC_LINEAR_ref) &&
19021 Diag(ELoc, diag::err_omp_wrong_linear_modifier_non_reference)
19025 Type =
Type.getNonReferenceType();
19032 if (!IsDeclareSimd &&
19037 Type =
Type.getUnqualifiedType().getCanonicalType();
19038 const auto *Ty =
Type.getTypePtrOrNull();
19039 if (!Ty || (LinKind != OMPC_LINEAR_ref && !Ty->isDependentType() &&
19040 !Ty->isIntegralType(
Context) && !Ty->isPointerType())) {
19041 Diag(ELoc, diag::err_omp_linear_expected_int_or_ptr) <<
Type;
19043 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
19046 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19064 LinKind = OMPC_LINEAR_val;
19065 for (
Expr *RefExpr : VarList) {
19066 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
19069 Expr *SimpleRefExpr = RefExpr;
19073 Vars.push_back(RefExpr);
19074 Privates.push_back(
nullptr);
19075 Inits.push_back(
nullptr);
19082 auto *VD = dyn_cast<VarDecl>(D);
19088 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
19089 if (DVar.RefExpr) {
19090 Diag(ELoc, diag::err_omp_wrong_dsa) << getOpenMPClauseName(DVar.CKind)
19091 << getOpenMPClauseName(OMPC_linear);
19098 Type =
Type.getNonReferenceType().getUnqualifiedType().getCanonicalType();
19104 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
19113 ExprCaptures.push_back(Ref->
getDecl());
19120 SimpleRefExpr, RefRes.
get());
19123 ExprPostUpdates.push_back(
19128 if (LinKind == OMPC_LINEAR_uval)
19129 InitExpr = VD ? VD->getInit() : SimpleRefExpr;
19131 InitExpr = VD ? SimpleRefExpr : Ref;
19136 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_linear, Ref);
19138 ? RefExpr->IgnoreParens()
19140 Privates.push_back(PrivateRef);
19141 Inits.push_back(InitRef);
19147 Expr *StepExpr = Step;
19148 Expr *CalcStepExpr =
nullptr;
19156 StepExpr = Val.
get();
19164 BuildBinOp(CurScope, StepLoc, BO_Assign, SaveRef.
get(), StepExpr);
19171 if (!Result->isNegative() && !Result->isStrictlyPositive())
19172 Diag(StepLoc, diag::warn_omp_linear_step_zero)
19173 << Vars[0] << (Vars.size() > 1);
19177 CalcStepExpr = CalcStep.
get();
19182 ColonLoc, EndLoc, Vars, Privates, Inits,
19183 StepExpr, CalcStepExpr,
19189 Expr *NumIterations,
Sema &SemaRef,
19190 Scope *S, DSAStackTy *Stack) {
19202 Step = cast<BinaryOperator>(CalcStep)->getLHS();
19203 bool HasErrors =
false;
19204 auto CurInit = Clause.
inits().begin();
19205 auto CurPrivate = Clause.
privates().begin();
19210 Expr *SimpleRefExpr = RefExpr;
19211 auto Res =
getPrivateItem(SemaRef, SimpleRefExpr, ELoc, ERange);
19213 if (Res.second || !D) {
19214 Updates.push_back(
nullptr);
19215 Finals.push_back(
nullptr);
19219 auto &&Info = Stack->isLoopControlVariable(D);
19226 diag::err_omp_linear_distribute_var_non_loop_iteration);
19227 Updates.push_back(
nullptr);
19228 Finals.push_back(
nullptr);
19232 Expr *InitExpr = *CurInit;
19235 auto *DE = cast<DeclRefExpr>(SimpleRefExpr);
19237 if (LinKind == OMPC_LINEAR_uval)
19238 CapturedRef = cast<VarDecl>(DE->getDecl())->getInit();
19242 DE->getType().getUnqualifiedType(), DE->getExprLoc(),
19249 SemaRef, S, RefExpr->getExprLoc(), *CurPrivate, InitExpr, IV, Step,
19260 S, RefExpr->getExprLoc(), BO_Assign, CapturedRef,
19263 Final = *CurPrivate;
19267 if (!
Update.isUsable() || !Final.isUsable()) {
19268 Updates.push_back(
nullptr);
19269 Finals.push_back(
nullptr);
19270 UsedExprs.push_back(
nullptr);
19273 Updates.push_back(
Update.get());
19274 Finals.push_back(Final.get());
19276 UsedExprs.push_back(SimpleRefExpr);
19282 UsedExprs.push_back(S);
19284 UsedExprs.append(Clause.
varlist_size() + 1 - UsedExprs.size(),
nullptr);
19295 for (
Expr *RefExpr : VarList) {
19296 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
19299 Expr *SimpleRefExpr = RefExpr;
19303 Vars.push_back(RefExpr);
19310 auto *VD = dyn_cast<VarDecl>(D);
19318 Diag(ELoc, diag::err_omp_aligned_expected_array_or_ptr)
19320 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
19323 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19330 if (
const Expr *PrevRef =
DSAStack->addUniqueAligned(D, SimpleRefExpr)) {
19331 Diag(ELoc, diag::err_omp_used_in_clause_twice)
19332 << 0 << getOpenMPClauseName(OMPC_aligned) << ERange;
19333 Diag(PrevRef->getExprLoc(), diag::note_omp_explicit_dsa)
19334 << getOpenMPClauseName(OMPC_aligned);
19342 (VD || !Ref) ? RefExpr->IgnoreParens() : Ref)
19351 if (Alignment !=
nullptr) {
19353 VerifyPositiveIntegerConstantInClause(Alignment, OMPC_aligned);
19356 Alignment = AlignResult.
get();
19362 EndLoc, Vars, Alignment);
19373 for (
Expr *RefExpr : VarList) {
19374 assert(RefExpr &&
"NULL expr in OpenMP copyin clause.");
19375 if (isa<DependentScopeDeclRefExpr>(RefExpr)) {
19377 Vars.push_back(RefExpr);
19378 SrcExprs.push_back(
nullptr);
19379 DstExprs.push_back(
nullptr);
19380 AssignmentOps.push_back(
nullptr);
19389 auto *DE = dyn_cast<DeclRefExpr>(RefExpr);
19390 if (!DE || !isa<VarDecl>(DE->getDecl())) {
19391 Diag(ELoc, diag::err_omp_expected_var_name_member_expr)
19392 << 0 << RefExpr->getSourceRange();
19396 Decl *D = DE->getDecl();
19397 auto *VD = cast<VarDecl>(D);
19402 Vars.push_back(DE);
19403 SrcExprs.push_back(
nullptr);
19404 DstExprs.push_back(
nullptr);
19405 AssignmentOps.push_back(
nullptr);
19411 if (!
DSAStack->isThreadPrivate(VD)) {
19412 Diag(ELoc, diag::err_omp_required_access)
19413 << getOpenMPClauseName(OMPC_copyin)
19414 << getOpenMPDirectiveName(OMPD_threadprivate);
19429 buildVarDecl(*
this, DE->getBeginLoc(), ElemType,
".copyin.dst",
19436 BuildBinOp(
nullptr, DE->getExprLoc(), BO_Assign, PseudoDstExpr,
19445 DSAStack->addDSA(VD, DE, OMPC_copyin);
19446 Vars.push_back(DE);
19447 SrcExprs.push_back(PseudoSrcExpr);
19448 DstExprs.push_back(PseudoDstExpr);
19449 AssignmentOps.push_back(AssignmentOp.
get());
19456 SrcExprs, DstExprs, AssignmentOps);
19467 for (
Expr *RefExpr : VarList) {
19468 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
19471 Expr *SimpleRefExpr = RefExpr;
19475 Vars.push_back(RefExpr);
19476 SrcExprs.push_back(
nullptr);
19477 DstExprs.push_back(
nullptr);
19478 AssignmentOps.push_back(
nullptr);
19485 auto *VD = dyn_cast<VarDecl>(D);
19490 if (!VD || !
DSAStack->isThreadPrivate(VD)) {
19491 DSAStackTy::DSAVarData DVar =
19493 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_copyprivate &&
19495 Diag(ELoc, diag::err_omp_wrong_dsa)
19496 << getOpenMPClauseName(DVar.CKind)
19497 << getOpenMPClauseName(OMPC_copyprivate);
19505 if (DVar.CKind == OMPC_unknown) {
19506 DVar =
DSAStack->getImplicitDSA(D,
false);
19507 if (DVar.CKind == OMPC_shared) {
19508 Diag(ELoc, diag::err_omp_required_access)
19509 << getOpenMPClauseName(OMPC_copyprivate)
19510 <<
"threadprivate or private in the enclosing context";
19519 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
19520 << getOpenMPClauseName(OMPC_copyprivate) <<
Type
19521 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
19522 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
19525 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19545 DSAStack->getCurScope(), ELoc, BO_Assign, PseudoDstExpr, PseudoSrcExpr);
19557 VD ? RefExpr->IgnoreParens()
19559 SrcExprs.push_back(PseudoSrcExpr);
19560 DstExprs.push_back(PseudoDstExpr);
19561 AssignmentOps.push_back(AssignmentOp.
get());
19568 Vars, SrcExprs, DstExprs, AssignmentOps);
19575 if (VarList.empty())
19583 bool Diagnose =
true) {
19584 QualType OMPDependT = Stack->getOMPDependT();
19585 if (!OMPDependT.
isNull())
19591 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_depend_t";
19594 Stack->setOMPDependT(PT.
get());
19611 (OMPDependTFound &&
19614 Diag(Depobj->
getExprLoc(), diag::err_omp_expected_omp_depend_t_lvalue)
19619 Diag(Depobj->
getExprLoc(), diag::err_omp_expected_omp_depend_t_lvalue)
19631 if (
DSAStack->getCurrentDirective() == OMPD_ordered &&
19632 DepKind != OMPC_DEPEND_source && DepKind != OMPC_DEPEND_sink) {
19633 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
19634 <<
"'source' or 'sink'" << getOpenMPClauseName(OMPC_depend);
19637 if (
DSAStack->getCurrentDirective() == OMPD_taskwait &&
19638 DepKind == OMPC_DEPEND_mutexinoutset) {
19639 Diag(DepLoc, diag::err_omp_taskwait_depend_mutexinoutset_not_allowed);
19642 if ((
DSAStack->getCurrentDirective() != OMPD_ordered ||
19643 DSAStack->getCurrentDirective() == OMPD_depobj) &&
19645 DepKind == OMPC_DEPEND_sink ||
19647 DSAStack->getCurrentDirective() == OMPD_depobj) &&
19648 DepKind == OMPC_DEPEND_depobj))) {
19650 Except.push_back(OMPC_DEPEND_source);
19651 Except.push_back(OMPC_DEPEND_sink);
19652 if (
LangOpts.OpenMP < 50 ||
DSAStack->getCurrentDirective() == OMPD_depobj)
19653 Except.push_back(OMPC_DEPEND_depobj);
19655 Except.push_back(OMPC_DEPEND_inoutset);
19657 ?
"depend modifier(iterator) or "
19659 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
19663 << getOpenMPClauseName(OMPC_depend);
19667 (DepKind == OMPC_DEPEND_source || DepKind == OMPC_DEPEND_sink)) {
19669 diag::err_omp_depend_sink_source_with_modifier);
19674 Diag(DepModifier->
getExprLoc(), diag::err_omp_depend_modifier_not_iterator);
19680 if (DepKind == OMPC_DEPEND_sink || DepKind == OMPC_DEPEND_source) {
19681 if (
const Expr *OrderedCountExpr =
19682 DSAStack->getParentOrderedRegionParam().first) {
19683 TotalDepCount = OrderedCountExpr->EvaluateKnownConstInt(
Context);
19684 TotalDepCount.setIsUnsigned(
true);
19687 for (
Expr *RefExpr : VarList) {
19688 assert(RefExpr &&
"NULL expr in OpenMP shared clause.");
19689 if (isa<DependentScopeDeclRefExpr>(RefExpr)) {
19691 Vars.push_back(RefExpr);
19697 if (DepKind == OMPC_DEPEND_sink) {
19698 if (
DSAStack->getParentOrderedRegionParam().first &&
19699 DepCounter >= TotalDepCount) {
19700 Diag(ELoc, diag::err_omp_depend_sink_unexpected_expr);
19714 Vars.push_back(RefExpr);
19720 Expr *LHS = SimpleExpr;
19721 Expr *RHS =
nullptr;
19722 if (
auto *BO = dyn_cast<BinaryOperator>(SimpleExpr)) {
19724 OOLoc = BO->getOperatorLoc();
19727 }
else if (
auto *OCE = dyn_cast<CXXOperatorCallExpr>(SimpleExpr)) {
19728 OOK = OCE->getOperator();
19729 OOLoc = OCE->getOperatorLoc();
19732 }
else if (
auto *MCE = dyn_cast<CXXMemberCallExpr>(SimpleExpr)) {
19733 OOK = MCE->getMethodDecl()
19736 .getCXXOverloadedOperator();
19737 OOLoc = MCE->getCallee()->getExprLoc();
19746 Vars.push_back(RefExpr);
19752 if (OOK != OO_Plus && OOK != OO_Minus && (RHS || OOK !=
OO_None)) {
19753 Diag(OOLoc, diag::err_omp_depend_sink_expected_plus_minus);
19757 ExprResult RHSRes = VerifyPositiveIntegerConstantInClause(
19758 RHS, OMPC_depend,
false);
19763 DSAStack->getParentOrderedRegionParam().first &&
19764 DepCounter !=
DSAStack->isParentLoopControlVariable(D).first) {
19766 DSAStack->getParentLoopControlVariable(DepCounter.getZExtValue());
19768 Diag(ELoc, diag::err_omp_depend_sink_expected_loop_iteration)
19771 Diag(ELoc, diag::err_omp_depend_sink_expected_loop_iteration) << 0;
19774 OpsOffs.emplace_back(RHS, OOK);
19776 bool OMPDependTFound =
LangOpts.OpenMP >= 50;
19777 if (OMPDependTFound)
19779 DepKind == OMPC_DEPEND_depobj);
19780 if (DepKind == OMPC_DEPEND_depobj) {
19784 if (!RefExpr->isValueDependent() && !RefExpr->isTypeDependent() &&
19785 !RefExpr->isInstantiationDependent() &&
19786 !RefExpr->containsUnexpandedParameterPack() &&
19787 (OMPDependTFound &&
19789 RefExpr->getType()))) {
19790 Diag(ELoc, diag::err_omp_expected_omp_depend_t_lvalue)
19791 << 0 << RefExpr->getType() << RefExpr->getSourceRange();
19794 if (!RefExpr->isLValue()) {
19795 Diag(ELoc, diag::err_omp_expected_omp_depend_t_lvalue)
19796 << 1 << RefExpr->getType() << RefExpr->getSourceRange();
19803 QualType ExprTy = RefExpr->getType().getNonReferenceType();
19804 const auto *OASE = dyn_cast<OMPArraySectionExpr>(SimpleExpr);
19809 ExprTy = ATy->getElementType();
19813 const Expr *Length = OASE->getLength();
19817 Result.Val.getInt().isZero()) {
19819 diag::err_omp_depend_zero_length_array_section_not_allowed)
19829 if (!RefExpr->isValueDependent() && !RefExpr->isTypeDependent() &&
19830 !RefExpr->isInstantiationDependent() &&
19831 !RefExpr->containsUnexpandedParameterPack() &&
19832 (!RefExpr->IgnoreParenImpCasts()->isLValue() ||
19833 (OMPDependTFound &&
19835 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
19836 << (
LangOpts.OpenMP >= 50 ? 1 : 0)
19837 << (
LangOpts.OpenMP >= 50 ? 1 : 0) << RefExpr->getSourceRange();
19841 auto *ASE = dyn_cast<ArraySubscriptExpr>(SimpleExpr);
19842 if (ASE && !ASE->getBase()->isTypeDependent() &&
19843 !ASE->getBase()->getType().getNonReferenceType()->isPointerType() &&
19844 !ASE->getBase()->getType().getNonReferenceType()->isArrayType()) {
19845 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
19846 << (
LangOpts.OpenMP >= 50 ? 1 : 0)
19847 << (
LangOpts.OpenMP >= 50 ? 1 : 0) << RefExpr->getSourceRange();
19855 RefExpr->IgnoreParenImpCasts());
19857 if (!Res.
isUsable() && !isa<OMPArraySectionExpr>(SimpleExpr) &&
19858 !isa<OMPArrayShapingExpr>(SimpleExpr)) {
19859 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
19860 << (
LangOpts.OpenMP >= 50 ? 1 : 0)
19861 << (
LangOpts.OpenMP >= 50 ? 1 : 0) << RefExpr->getSourceRange();
19866 Vars.push_back(RefExpr->IgnoreParenImpCasts());
19870 TotalDepCount > VarList.size() &&
19871 DSAStack->getParentOrderedRegionParam().first &&
19872 DSAStack->getParentLoopControlVariable(VarList.size() + 1)) {
19873 Diag(EndLoc, diag::err_omp_depend_sink_expected_loop_iteration)
19874 << 1 <<
DSAStack->getParentLoopControlVariable(VarList.size() + 1);
19876 if (DepKind != OMPC_DEPEND_source && DepKind != OMPC_DEPEND_sink &&
19881 DepModifier, DepKind, DepLoc, ColonLoc,
19882 Vars, TotalDepCount.getZExtValue());
19883 if ((DepKind == OMPC_DEPEND_sink || DepKind == OMPC_DEPEND_source) &&
19884 DSAStack->isParentOrderedRegion())
19885 DSAStack->addDoacrossDependClause(C, OpsOffs);
19895 "Unexpected device modifier in OpenMP < 50.");
19897 bool ErrorFound =
false;
19901 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
19902 << Values << getOpenMPClauseName(OMPC_device);
19906 Expr *ValExpr = Device;
19907 Stmt *HelperValStmt =
nullptr;
19920 if (Modifier == OMPC_DEVICE_ancestor) {
19924 diag::err_omp_device_ancestor_without_requires_reverse_offload);
19934 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
19935 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
19940 OMPDeviceClause(Modifier, ValExpr, HelperValStmt, CaptureRegion, StartLoc,
19941 LParenLoc, ModifierLoc, EndLoc);
19946 bool FullCheck =
true) {
19951 SemaRef.
Diag(SL, diag::warn_omp_non_trivial_type_mapped) << QTy << SR;
19961 const auto *OASE = dyn_cast<OMPArraySectionExpr>(E);
19966 if (isa<ArraySubscriptExpr>(E) ||
19967 (OASE && OASE->getColonLocFirst().isInvalid())) {
19968 if (
const auto *ATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr()))
19969 return ATy->getSize().getSExtValue() != 1;
19974 assert(OASE &&
"Expecting array section if not an array subscript.");
19975 const Expr *LowerBound = OASE->getLowerBound();
19976 const Expr *Length = OASE->getLength();
19986 if (ConstLowerBound.getSExtValue())
20001 const auto *CATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr());
20010 return CATy->getSize().getSExtValue() != ConstLength.getSExtValue();
20019 const auto *OASE = dyn_cast<OMPArraySectionExpr>(E);
20023 if (isa<ArraySubscriptExpr>(E) ||
20024 (OASE && OASE->getColonLocFirst().isInvalid()))
20027 assert(OASE &&
"Expecting array section if not an array subscript.");
20028 const Expr *Length = OASE->getLength();
20034 if (
const auto *ATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr()))
20035 return ATy->getSize().getSExtValue() != 1;
20046 return ConstLength.getSExtValue() != 1;
20085 class MapBaseChecker final :
public StmtVisitor<MapBaseChecker, bool> {
20090 bool IsNonContiguous =
false;
20091 bool NoDiagnose =
false;
20092 const Expr *RelevantExpr =
nullptr;
20093 bool AllowUnitySizeArraySection =
true;
20094 bool AllowWholeSizeArraySection =
true;
20095 bool AllowAnotherPtr =
true;
20099 void emitErrorMsg() {
20101 if (SemaRef.getLangOpts().OpenMP < 50) {
20103 diag::err_omp_expected_named_var_member_or_array_expression)
20106 SemaRef.Diag(ELoc, diag::err_omp_non_lvalue_in_map_or_motion_clauses)
20107 << getOpenMPClauseName(CKind) << ERange;
20113 if (!isa<VarDecl>(DRE->
getDecl())) {
20117 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
20118 RelevantExpr = DRE;
20120 Components.emplace_back(DRE, DRE->
getDecl(), IsNonContiguous);
20128 if (isa<CXXThisExpr>(BaseE)) {
20129 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
20138 SemaRef.Diag(ELoc, diag::err_omp_expected_access_to_data_field)
20152 if (FD->isBitField()) {
20154 SemaRef.Diag(ELoc, diag::err_omp_bit_fields_forbidden_in_clause)
20174 SemaRef.Diag(ELoc, diag::err_omp_union_type_not_allowed)
20178 return RelevantExpr || Visit(E);
20188 AllowUnitySizeArraySection =
false;
20189 AllowWholeSizeArraySection =
false;
20192 Components.emplace_back(ME, FD, IsNonContiguous);
20193 return RelevantExpr || Visit(E);
20201 SemaRef.Diag(ELoc, diag::err_omp_expected_base_var_name)
20205 return RelevantExpr || Visit(E);
20212 AllowWholeSizeArraySection =
false;
20218 !Result.Val.getInt().isZero()) {
20220 diag::err_omp_invalid_map_this_expr);
20222 diag::note_omp_invalid_subscript_on_this_ptr_map);
20224 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
20229 Components.emplace_back(AE,
nullptr, IsNonContiguous);
20231 return RelevantExpr || Visit(E);
20237 assert(!(SemaRef.getLangOpts().OpenMP < 50 && NoDiagnose) &&
20238 "Array sections cannot be implicitly mapped.");
20252 SemaRef.Diag(ELoc, diag::err_omp_expected_base_var_name)
20262 if (AllowWholeSizeArraySection) {
20269 if (NotWhole || IsPointer)
20270 AllowWholeSizeArraySection =
false;
20271 }
else if (DKind == OMPD_target_update &&
20272 SemaRef.getLangOpts().OpenMP >= 50) {
20273 if (IsPointer && !AllowAnotherPtr)
20274 SemaRef.Diag(ELoc, diag::err_omp_section_length_undefined)
20277 IsNonContiguous =
true;
20278 }
else if (AllowUnitySizeArraySection && NotUnity) {
20284 diag::err_array_section_does_not_specify_contiguous_storage)
20290 AllowAnotherPtr =
false;
20292 if (
const auto *TE = dyn_cast<CXXThisExpr>(E)) {
20299 diag::err_omp_invalid_map_this_expr);
20301 diag::note_omp_invalid_length_on_this_ptr_mapping);
20305 SemaRef.getASTContext()) &&
20308 diag::err_omp_invalid_map_this_expr);
20310 diag::note_omp_invalid_lower_bound_on_this_ptr_mapping);
20312 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
20317 Components.emplace_back(OASE,
nullptr,
false);
20318 return RelevantExpr || Visit(E);
20324 Components.emplace_back(E,
nullptr, IsNonContiguous);
20326 return Visit(
Base->IgnoreParenImpCasts());
20330 if (SemaRef.getLangOpts().OpenMP < 50 || !UO->
isLValue() ||
20335 if (!RelevantExpr) {
20337 Components.emplace_back(UO,
nullptr,
false);
20353 Components.emplace_back(BO,
nullptr,
false);
20356 "Either LHS or RHS have base decl inside");
20358 return RelevantExpr || Visit(
LE);
20359 return RelevantExpr || Visit(RE);
20362 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
20363 RelevantExpr = CTE;
20364 Components.emplace_back(CTE,
nullptr, IsNonContiguous);
20368 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
20369 Components.emplace_back(COCE,
nullptr, IsNonContiguous);
20378 return Visit(Source);
20380 bool VisitStmt(
Stmt *) {
20384 const Expr *getFoundBase()
const {
return RelevantExpr; }
20385 explicit MapBaseChecker(
20389 : SemaRef(SemaRef), CKind(CKind), DKind(DKind), Components(Components),
20390 NoDiagnose(NoDiagnose), ELoc(ELoc), ERange(ERange) {}
20404 MapBaseChecker Checker(SemaRef, CKind, DKind, CurComponents, NoDiagnose, ELoc,
20408 if (SemaRef.
getLangOpts().OpenMP >= 50 && !CurComponents.empty() &&
20409 (CKind == OMPC_to || CKind == OMPC_from)) {
20410 auto CI = CurComponents.rbegin();
20411 auto CE = CurComponents.rend();
20412 for (; CI != CE; ++CI) {
20414 dyn_cast<OMPArraySectionExpr>(CI->getAssociatedExpression());
20419 SemaRef.
Diag(ELoc, diag::err_array_section_does_not_specify_length)
20423 return Checker.getFoundBase();
20432 bool CurrentRegionOnly,
20443 assert(!CurComponents.empty() &&
"Map clause expression with no components!");
20444 assert(CurComponents.back().getAssociatedDeclaration() == VD &&
20445 "Map clause expression with unexpected base!");
20448 bool IsEnclosedByDataEnvironmentExpr =
false;
20449 const Expr *EnclosingExpr =
nullptr;
20451 bool FoundError = DSAS->checkMappableExprComponentListsForDecl(
20452 VD, CurrentRegionOnly,
20453 [&IsEnclosedByDataEnvironmentExpr, &SemaRef, VD, CurrentRegionOnly, ELoc,
20454 ERange, CKind, &EnclosingExpr,
20460 assert(!StackComponents.empty() &&
20461 "Map clause expression with no components!");
20462 assert(StackComponents.back().getAssociatedDeclaration() == VD &&
20463 "Map clause expression with unexpected base!");
20467 const Expr *RE = StackComponents.front().getAssociatedExpression();
20473 auto CI = CurComponents.rbegin();
20474 auto CE = CurComponents.rend();
20475 auto SI = StackComponents.rbegin();
20476 auto SE = StackComponents.rend();
20477 for (; CI != CE && SI != SE; ++CI, ++SI) {
20482 if (CurrentRegionOnly &&
20483 (isa<ArraySubscriptExpr>(CI->getAssociatedExpression()) ||
20484 isa<OMPArraySectionExpr>(CI->getAssociatedExpression()) ||
20485 isa<OMPArrayShapingExpr>(CI->getAssociatedExpression())) &&
20486 (isa<ArraySubscriptExpr>(SI->getAssociatedExpression()) ||
20487 isa<OMPArraySectionExpr>(SI->getAssociatedExpression()) ||
20488 isa<OMPArrayShapingExpr>(SI->getAssociatedExpression()))) {
20489 SemaRef.Diag(CI->getAssociatedExpression()->getExprLoc(),
20490 diag::err_omp_multiple_array_items_in_map_clause)
20491 << CI->getAssociatedExpression()->getSourceRange();
20492 SemaRef.Diag(SI->getAssociatedExpression()->getExprLoc(),
20493 diag::note_used_here)
20494 << SI->getAssociatedExpression()->getSourceRange();
20499 if (CI->getAssociatedExpression()->getStmtClass() !=
20500 SI->getAssociatedExpression()->getStmtClass())
20504 if (CI->getAssociatedDeclaration() != SI->getAssociatedDeclaration())
20510 for (; SI != SE; ++SI) {
20512 if (
const auto *ASE =
20513 dyn_cast<ArraySubscriptExpr>(SI->getAssociatedExpression())) {
20514 Type = ASE->getBase()->IgnoreParenImpCasts()->getType();
20515 }
else if (
const auto *OASE = dyn_cast<OMPArraySectionExpr>(
20516 SI->getAssociatedExpression())) {
20517 const Expr *E = OASE->getBase()->IgnoreParenImpCasts();
20519 OMPArraySectionExpr::getBaseOriginalType(E).getCanonicalType();
20520 }
else if (
const auto *OASE = dyn_cast<OMPArrayShapingExpr>(
20521 SI->getAssociatedExpression())) {
20522 Type = OASE->getBase()->getType()->getPointeeType();
20526 SemaRef, SI->getAssociatedExpression(),
Type))
20536 if (CI == CE && SI == SE) {
20537 if (CurrentRegionOnly) {
20538 if (CKind == OMPC_map) {
20539 SemaRef.
Diag(ELoc, diag::err_omp_map_shared_storage) << ERange;
20541 assert(CKind == OMPC_to || CKind == OMPC_from);
20542 SemaRef.
Diag(ELoc, diag::err_omp_once_referenced_in_target_update)
20551 IsEnclosedByDataEnvironmentExpr =
true;
20556 std::prev(CI)->getAssociatedDeclaration()->getType();
20558 std::prev(CI)->getAssociatedExpression()->getExprLoc();
20577 if (CI == CE || SI == SE) {
20580 diag::err_omp_pointer_mapped_along_with_derived_section)
20586 if (CI->getAssociatedExpression()->getStmtClass() !=
20587 SI->getAssociatedExpression()->getStmtClass() ||
20588 CI->getAssociatedDeclaration()->getCanonicalDecl() ==
20589 SI->getAssociatedDeclaration()->getCanonicalDecl()) {
20590 assert(CI != CE && SI != SE);
20591 SemaRef.
Diag(DerivedLoc, diag::err_omp_same_pointer_dereferenced)
20604 if (CurrentRegionOnly && (CI == CE || SI == SE)) {
20605 if (CKind == OMPC_map) {
20606 if (CI != CE || SI != SE) {
20610 CI != CE ? CurComponents.begin() : StackComponents.begin();
20611 auto End = CI != CE ? CurComponents.end() : StackComponents.end();
20613 while (It !=
End && !It->getAssociatedDeclaration())
20615 assert(It !=
End &&
20616 "Expected at least one component with the declaration.");
20617 if (It !=
Begin && It->getAssociatedDeclaration()
20619 .getCanonicalType()
20620 ->isAnyPointerType()) {
20621 IsEnclosedByDataEnvironmentExpr =
false;
20622 EnclosingExpr =
nullptr;
20626 SemaRef.
Diag(ELoc, diag::err_omp_map_shared_storage) << ERange;
20628 assert(CKind == OMPC_to || CKind == OMPC_from);
20629 SemaRef.
Diag(ELoc, diag::err_omp_once_referenced_in_target_update)
20639 if (!CurrentRegionOnly && SI != SE)
20640 EnclosingExpr = RE;
20644 IsEnclosedByDataEnvironmentExpr |=
20645 (!CurrentRegionOnly && CI != CE && SI == SE);
20650 if (CurrentRegionOnly)
20664 if (EnclosingExpr && !IsEnclosedByDataEnvironmentExpr) {
20666 diag::err_omp_original_storage_is_shared_and_does_not_contain)
20682 Expr *UnresolvedMapper) {
20697 while (S && !S->isDeclScope(D))
20698 S = S->getParent();
20700 S = S->getParent();
20701 Lookups.emplace_back();
20702 Lookups.back().append(Lookup.
begin(), Lookup.
end());
20705 }
else if (
auto *ULE = cast_or_null<UnresolvedLookupExpr>(UnresolvedMapper)) {
20709 auto *DMD = cast<OMPDeclareMapperDecl>(D);
20710 assert(DMD &&
"Expect valid OMPDeclareMapperDecl during instantiation.");
20711 Lookups.back().addDecl(DMD);
20719 filterLookupForUDReductionAndMapper<bool>(Lookups, [](
ValueDecl *D) {
20720 return !D->isInvalidDecl() &&
20721 (D->getType()->isDependentType() ||
20722 D->getType()->isInstantiationDependentType() ||
20723 D->getType()->containsUnexpandedParameterPack());
20729 URS.
append(Set.begin(), Set.end());
20734 false,
true, URS.
begin(), URS.
end());
20741 SemaRef.
Diag(Loc, diag::err_omp_mapper_wrong_type);
20748 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
20758 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
20781 SemaRef.
Diag(Loc, diag::err_omp_invalid_mapper)
20791 struct MappableVarListInfo {
20806 VarComponents.reserve(VarList.size());
20807 VarBaseDeclarations.reserve(VarList.size());
20825 bool IsMapTypeImplicit =
false,
bool NoDiagnose =
false) {
20827 assert((CKind == OMPC_map || CKind == OMPC_to || CKind == OMPC_from) &&
20828 "Unexpected clause kind with mappable expressions!");
20836 MapperId.
setName(DeclNames.getIdentifier(
20838 MapperId.
setLoc(StartLoc);
20842 auto UMIt = UnresolvedMappers.begin(), UMEnd = UnresolvedMappers.end();
20843 bool UpdateUMIt =
false;
20844 Expr *UnresolvedMapper =
nullptr;
20846 bool HasHoldModifier =
20847 llvm::is_contained(Modifiers, OMPC_MAP_MODIFIER_ompx_hold);
20855 for (
Expr *RE : MVLI.VarList) {
20856 assert(RE &&
"Null expr in omp to/from/map clause");
20860 if (UpdateUMIt && UMIt != UMEnd) {
20864 "Expect the size of UnresolvedMappers to match with that of VarList");
20868 UnresolvedMapper = *UMIt;
20877 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
20881 MVLI.UDMapperList.push_back(ER.
get());
20884 MVLI.ProcessedVarList.push_back(RE);
20893 ELoc, diag::err_omp_expected_named_var_member_or_array_expression)
20896 SemaRef.
Diag(ELoc, diag::err_omp_non_lvalue_in_map_or_motion_clauses)
20909 DSAS->getCurrentDirective(), NoDiagnose);
20913 assert(!CurComponents.empty() &&
20914 "Invalid mappable expression information.");
20916 if (
const auto *TE = dyn_cast<CXXThisExpr>(BE)) {
20918 DSAS->addMappedClassesQualTypes(TE->getType());
20921 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
20925 MVLI.UDMapperList.push_back(ER.
get());
20927 MVLI.ProcessedVarList.push_back(RE);
20928 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
20929 MVLI.VarComponents.back().append(CurComponents.begin(),
20930 CurComponents.end());
20931 MVLI.VarBaseDeclarations.push_back(
nullptr);
20938 CurDeclaration = CurComponents.back().getAssociatedDeclaration();
20939 assert(CurDeclaration &&
"Null decl on map clause.");
20942 "Expecting components to have associated only canonical declarations.");
20944 auto *VD = dyn_cast<VarDecl>(CurDeclaration);
20945 const auto *FD = dyn_cast<FieldDecl>(CurDeclaration);
20947 assert((VD || FD) &&
"Only variables or fields are expected here!");
20954 if (VD && DSAS->isThreadPrivate(VD)) {
20957 DSAStackTy::DSAVarData DVar = DSAS->getTopDSA(VD,
false);
20958 SemaRef.
Diag(ELoc, diag::err_omp_threadprivate_in_clause)
20959 << getOpenMPClauseName(CKind);
20973 true, CurComponents, CKind))
20975 if (CKind == OMPC_map &&
20978 false, CurComponents, CKind))
20985 auto I = llvm::find_if(
20990 assert(I != CurComponents.end() &&
"Null decl on map clause.");
20993 auto *ASE = dyn_cast<ArraySubscriptExpr>(VE->
IgnoreParens());
20994 auto *OASE = dyn_cast<OMPArraySectionExpr>(VE->
IgnoreParens());
20995 auto *OAShE = dyn_cast<OMPArrayShapingExpr>(VE->
IgnoreParens());
20997 Type = ASE->getType().getNonReferenceType();
21002 Type = ATy->getElementType();
21005 Type =
Type.getNonReferenceType();
21006 }
else if (OAShE) {
21020 if (CKind == OMPC_map) {
21026 if (DKind == OMPD_target_enter_data &&
21027 !(MapType == OMPC_MAP_to || MapType == OMPC_MAP_alloc)) {
21028 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
21029 << (IsMapTypeImplicit ? 1 : 0)
21031 << getOpenMPDirectiveName(DKind);
21039 if (DKind == OMPD_target_exit_data &&
21040 !(MapType == OMPC_MAP_from || MapType == OMPC_MAP_release ||
21041 MapType == OMPC_MAP_delete)) {
21042 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
21043 << (IsMapTypeImplicit ? 1 : 0)
21045 << getOpenMPDirectiveName(DKind);
21054 if ((DKind == OMPD_target_enter_data || DKind == OMPD_target_exit_data) &&
21056 SemaRef.
Diag(StartLoc,
21057 diag::err_omp_invalid_map_type_modifier_for_directive)
21059 OMPC_MAP_MODIFIER_ompx_hold)
21060 << getOpenMPDirectiveName(DKind);
21068 if ((DKind == OMPD_target_data ||
21070 !(MapType == OMPC_MAP_to || MapType == OMPC_MAP_from ||
21071 MapType == OMPC_MAP_tofrom || MapType == OMPC_MAP_alloc)) {
21072 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
21073 << (IsMapTypeImplicit ? 1 : 0)
21075 << getOpenMPDirectiveName(DKind);
21087 if (VD && ((SemaRef.
LangOpts.OpenMP <= 45 &&
21089 DKind == OMPD_target)) {
21090 DSAStackTy::DSAVarData DVar = DSAS->getTopDSA(VD,
false);
21092 SemaRef.
Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
21093 << getOpenMPClauseName(DVar.CKind)
21094 << getOpenMPClauseName(OMPC_map)
21095 << getOpenMPDirectiveName(DSAS->getCurrentDirective());
21104 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
21105 Type.getCanonicalType(), UnresolvedMapper);
21108 MVLI.UDMapperList.push_back(ER.
get());
21111 MVLI.ProcessedVarList.push_back(RE);
21115 DSAS->addMappableExpressionComponents(CurDeclaration, CurComponents,
21121 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
21122 MVLI.VarComponents.back().append(CurComponents.begin(),
21123 CurComponents.end());
21124 MVLI.VarBaseDeclarations.push_back(isa<MemberExpr>(BE) ?
nullptr
21144 unsigned Count = 0;
21145 for (
unsigned I = 0, E = MapTypeModifiers.size(); I < E; ++I) {
21147 llvm::is_contained(Modifiers, MapTypeModifiers[I])) {
21148 Diag(MapTypeModifiersLoc[I], diag::err_omp_duplicate_map_type_modifier);
21152 "Modifiers exceed the allowed number of map type modifiers");
21153 Modifiers[Count] = MapTypeModifiers[I];
21154 ModifiersLoc[Count] = MapTypeModifiersLoc[I];
21158 MappableVarListInfo MVLI(VarList);
21160 MapperIdScopeSpec, MapperId, UnresolvedMappers,
21161 MapType, Modifiers, IsMapTypeImplicit,
21167 MVLI.VarBaseDeclarations, MVLI.VarComponents,
21168 MVLI.UDMapperList, Modifiers, ModifiersLoc,
21170 MapperId, MapType, IsMapTypeImplicit, MapLoc);
21178 if (ReductionType.
isNull())
21186 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 0;
21191 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 1;
21195 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 2;
21199 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 3;
21202 return ReductionType;
21207 ArrayRef<std::pair<QualType, SourceLocation>> ReductionTypes,
21210 Decls.reserve(ReductionTypes.size());
21218 llvm::DenseMap<QualType, SourceLocation> PreviousRedeclTypes;
21220 bool InCompoundScope =
true;
21221 if (S !=
nullptr) {
21230 llvm::DenseMap<OMPDeclareReductionDecl *, bool> UsedAsPrevious;
21232 while (
Filter.hasNext()) {
21233 auto *PrevDecl = cast<OMPDeclareReductionDecl>(
Filter.next());
21234 if (InCompoundScope) {
21235 auto I = UsedAsPrevious.find(PrevDecl);
21236 if (I == UsedAsPrevious.end())
21237 UsedAsPrevious[PrevDecl] =
false;
21239 UsedAsPrevious[D] =
true;
21241 PreviousRedeclTypes[PrevDecl->getType().getCanonicalType()] =
21242 PrevDecl->getLocation();
21245 if (InCompoundScope) {
21246 for (
const auto &PrevData : UsedAsPrevious) {
21247 if (!PrevData.second) {
21248 PrevDRD = PrevData.first;
21253 }
else if (PrevDeclInScope !=
nullptr) {
21254 auto *PrevDRDInScope = PrevDRD =
21255 cast<OMPDeclareReductionDecl>(PrevDeclInScope);
21257 PreviousRedeclTypes[PrevDRDInScope->getType().getCanonicalType()] =
21258 PrevDRDInScope->getLocation();
21259 PrevDRDInScope = PrevDRDInScope->getPrevDeclInScope();
21260 }
while (PrevDRDInScope !=
nullptr);
21262 for (
const auto &TyData : ReductionTypes) {
21263 const auto I = PreviousRedeclTypes.find(TyData.first.getCanonicalType());
21264 bool Invalid =
false;
21265 if (I != PreviousRedeclTypes.end()) {
21266 Diag(TyData.second, diag::err_omp_declare_reduction_redefinition)
21268 Diag(I->second, diag::note_previous_definition);
21271 PreviousRedeclTypes[TyData.first.getCanonicalType()] = TyData.second;
21273 Name, TyData.first, PrevDRD);
21275 DRD->setAccess(AS);
21276 Decls.push_back(DRD);
21278 DRD->setInvalidDecl();
21288 auto *DRD = cast<OMPDeclareReductionDecl>(D);
21303 QualType ReductionType = DRD->getType();
21320 if (S !=
nullptr) {
21324 DRD->addDecl(OmpInParm);
21325 DRD->addDecl(OmpOutParm);
21331 DRD->setCombinerData(InE, OutE);
21335 auto *DRD = cast<OMPDeclareReductionDecl>(D);
21342 if (Combiner !=
nullptr)
21343 DRD->setCombiner(Combiner);
21345 DRD->setInvalidDecl();
21349 auto *DRD = cast<OMPDeclareReductionDecl>(D);
21363 QualType ReductionType = DRD->getType();
21380 if (S !=
nullptr) {
21384 DRD->addDecl(OmpPrivParm);
21385 DRD->addDecl(OmpOrigParm);
21391 DRD->setInitializerData(OrigE, PrivE);
21392 return OmpPrivParm;
21397 auto *DRD = cast<OMPDeclareReductionDecl>(D);
21406 }
else if (OmpPrivParm->
hasInit()) {
21407 DRD->setInitializer(OmpPrivParm->
getInit(),
21412 DRD->setInvalidDecl();
21418 for (
Decl *D : DeclReductions.
get()) {
21424 D->setInvalidDecl();
21427 return DeclReductions;
21446 assert(
ParsedType.isUsable() &&
"Expect usable parsed mapper type");
21449 assert(!MapperType.
isNull() &&
"Expect valid mapper type");
21454 Diag(TyLoc, diag::err_omp_mapper_wrong_type);
21470 llvm::DenseMap<QualType, SourceLocation> PreviousRedeclTypes;
21472 bool InCompoundScope =
true;
21473 if (S !=
nullptr) {
21482 llvm::DenseMap<OMPDeclareMapperDecl *, bool> UsedAsPrevious;
21484 while (
Filter.hasNext()) {
21485 auto *PrevDecl = cast<OMPDeclareMapperDecl>(
Filter.next());
21486 if (InCompoundScope) {
21487 auto I = UsedAsPrevious.find(PrevDecl);
21488 if (I == UsedAsPrevious.end())
21489 UsedAsPrevious[PrevDecl] =
false;
21491 UsedAsPrevious[D] =
true;
21493 PreviousRedeclTypes[PrevDecl->getType().getCanonicalType()] =
21494 PrevDecl->getLocation();
21497 if (InCompoundScope) {
21498 for (
const auto &PrevData : UsedAsPrevious) {
21499 if (!PrevData.second) {
21500 PrevDMD = PrevData.first;
21505 }
else if (PrevDeclInScope) {
21506 auto *PrevDMDInScope = PrevDMD =
21507 cast<OMPDeclareMapperDecl>(PrevDeclInScope);
21509 PreviousRedeclTypes[PrevDMDInScope->getType().getCanonicalType()] =
21510 PrevDMDInScope->getLocation();
21511 PrevDMDInScope = PrevDMDInScope->getPrevDeclInScope();
21512 }
while (PrevDMDInScope !=
nullptr);
21515 bool Invalid =
false;
21516 if (I != PreviousRedeclTypes.end()) {
21517 Diag(StartLoc, diag::err_omp_declare_mapper_redefinition)
21518 << MapperType << Name;
21519 Diag(I->second, diag::note_previous_definition);
21530 ClausesWithImplicit, PrevDMD);
21535 DMD->setAccess(AS);
21537 DMD->setInvalidDecl();
21539 auto *VD = cast<DeclRefExpr>(MapperVarRef)->getDecl();
21543 DMD->setMapperVarRef(MapperVarRef);
21560 DSAStack->addDeclareMapperVarRef(E);
21565 assert(
LangOpts.OpenMP &&
"Expected OpenMP mode.");
21567 if (
const auto *DRE = cast_or_null<DeclRefExpr>(Ref)) {
21578 assert(
LangOpts.OpenMP &&
"Expected OpenMP mode.");
21579 return cast<DeclRefExpr>(
DSAStack->getDeclareMapperVarRef())->getDecl();
21586 Expr *ValExpr = NumTeams;
21587 Stmt *HelperValStmt =
nullptr;
21600 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
21601 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
21606 StartLoc, LParenLoc, EndLoc);
21613 Expr *ValExpr = ThreadLimit;
21614 Stmt *HelperValStmt =
nullptr;
21624 DKind, OMPC_thread_limit,
LangOpts.OpenMP);
21627 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
21628 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
21633 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
21641 Stmt *HelperValStmt =
nullptr;
21647 ValExpr, *
this, OMPC_priority,
21649 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
21653 StartLoc, LParenLoc, EndLoc);
21660 Expr *ValExpr = Grainsize;
21661 Stmt *HelperValStmt =
nullptr;
21668 ValExpr, *
this, OMPC_grainsize,
21670 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
21674 StartLoc, LParenLoc, EndLoc);
21681 Expr *ValExpr = NumTasks;
21682 Stmt *HelperValStmt =
nullptr;
21689 ValExpr, *
this, OMPC_num_tasks,
21691 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
21695 StartLoc, LParenLoc, EndLoc);
21705 VerifyPositiveIntegerConstantInClause(Hint, OMPC_hint,
false);
21714 DSAStackTy *Stack) {
21715 QualType OMPEventHandleT = Stack->getOMPEventHandleT();
21716 if (!OMPEventHandleT.
isNull())
21721 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_event_handle_t";
21724 Stack->setOMPEventHandleT(PT.
get());
21744 auto *VD = dyn_cast_or_null<VarDecl>(Ref->
getDecl());
21754 <<
"omp_event_handle_t" << 1 << VD->
getType()
21761 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(VD,
false);
21762 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_firstprivate &&
21765 << getOpenMPClauseName(DVar.CKind)
21766 << getOpenMPClauseName(OMPC_firstprivate);
21784 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
21785 << Values << getOpenMPClauseName(OMPC_dist_schedule);
21788 Expr *ValExpr = ChunkSize;
21789 Stmt *HelperValStmt =
nullptr;
21800 ValExpr = Val.
get();
21807 if (Result->isSigned() && !Result->isStrictlyPositive()) {
21808 Diag(ChunkSizeLoc, diag::err_omp_negative_expression_in_clause)
21813 DSAStack->getCurrentDirective(), OMPC_dist_schedule,
21814 LangOpts.OpenMP) != OMPD_unknown &&
21817 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
21818 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
21826 Kind, ValExpr, HelperValStmt);
21834 if (M != OMPC_DEFAULTMAP_MODIFIER_tofrom ||
21835 Kind != OMPC_DEFAULTMAP_scalar) {
21839 if (M != OMPC_DEFAULTMAP_MODIFIER_tofrom) {
21841 OMPC_DEFAULTMAP_MODIFIER_tofrom);
21845 OMPC_DEFAULTMAP_scalar);
21849 Diag(Loc, diag::err_omp_unexpected_clause_value)
21850 <<
Value << getOpenMPClauseName(OMPC_defaultmap);
21857 if (!isDefaultmapKind || !isDefaultmapModifier) {
21858 StringRef KindValue =
"'scalar', 'aggregate', 'pointer'";
21860 StringRef ModifierValue =
"'alloc', 'from', 'to', 'tofrom', "
21861 "'firstprivate', 'none', 'default'";
21862 if (!isDefaultmapKind && isDefaultmapModifier) {
21863 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
21864 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
21865 }
else if (isDefaultmapKind && !isDefaultmapModifier) {
21866 Diag(MLoc, diag::err_omp_unexpected_clause_value)
21867 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
21869 Diag(MLoc, diag::err_omp_unexpected_clause_value)
21870 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
21871 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
21872 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
21875 StringRef ModifierValue =
21876 "'alloc', 'from', 'to', 'tofrom', "
21877 "'firstprivate', 'none', 'default', 'present'";
21878 if (!isDefaultmapKind && isDefaultmapModifier) {
21879 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
21880 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
21881 }
else if (isDefaultmapKind && !isDefaultmapModifier) {
21882 Diag(MLoc, diag::err_omp_unexpected_clause_value)
21883 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
21885 Diag(MLoc, diag::err_omp_unexpected_clause_value)
21886 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
21887 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
21888 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
21898 Diag(StartLoc, diag::err_omp_one_defaultmap_each_category);
21904 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_aggregate, StartLoc);
21905 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_scalar, StartLoc);
21906 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_pointer, StartLoc);
21916 DeclareTargetContextInfo &DTCI) {
21921 !isa<CXXRecordDecl>(CurLexicalContext) &&
21922 !isa<ClassTemplateDecl>(CurLexicalContext) &&
21923 !isa<ClassTemplatePartialSpecializationDecl>(CurLexicalContext) &&
21924 !isa<ClassTemplateSpecializationDecl>(CurLexicalContext)) {
21925 Diag(DTCI.Loc, diag::err_omp_region_not_file_context);
21928 DeclareTargetNesting.push_back(DTCI);
21932 const Sema::DeclareTargetContextInfo
21934 assert(!DeclareTargetNesting.empty() &&
21935 "check isInOpenMPDeclareTargetContext() first!");
21936 return DeclareTargetNesting.pop_back_val();
21940 DeclareTargetContextInfo &DTCI) {
21941 for (
auto &It : DTCI.ExplicitlyMapped)
21956 VarOrFuncDeclFilterCCC CCC(*
this);
21966 Diag(
Id.getLoc(), diag::err_undeclared_var_use) <<
Id.getName();
21971 if (!isa<VarDecl>(ND) && !isa<FunctionDecl>(ND) &&
21972 !isa<FunctionTemplateDecl>(ND)) {
21973 Diag(
Id.getLoc(), diag::err_omp_invalid_target_decl) <<
Id.getName();
21980 OMPDeclareTargetDeclAttr::MapTypeTy MT,
21981 DeclareTargetContextInfo &DTCI) {
21982 assert((isa<VarDecl>(ND) || isa<FunctionDecl>(ND) ||
21983 isa<FunctionTemplateDecl>(ND)) &&
21984 "Expected variable, function or function template.");
21990 Diag(Loc, diag::warn_omp_declare_target_after_first_use);
21993 const unsigned Level = -1;
21995 auto *VD = cast<ValueDecl>(ND);
21997 OMPDeclareTargetDeclAttr::getActiveAttr(VD);
21998 if (ActiveAttr.hasValue() && ActiveAttr.getValue()->getDevType() != DTCI.DT &&
21999 ActiveAttr.getValue()->getLevel() ==
Level) {
22000 Diag(Loc, diag::err_omp_device_type_mismatch)
22001 << OMPDeclareTargetDeclAttr::ConvertDevTypeTyToStr(DTCI.DT)
22002 << OMPDeclareTargetDeclAttr::ConvertDevTypeTyToStr(
22003 ActiveAttr.getValue()->getDevType());
22006 if (ActiveAttr.hasValue() && ActiveAttr.getValue()->getMapType() != MT &&
22007 ActiveAttr.getValue()->getLevel() ==
Level) {
22008 Diag(Loc, diag::err_omp_declare_target_to_and_link) << ND;
22012 if (ActiveAttr.hasValue() && ActiveAttr.getValue()->getLevel() ==
Level)
22015 Expr *IndirectE =
nullptr;
22016 bool IsIndirect =
false;
22017 if (DTCI.Indirect.hasValue()) {
22018 IndirectE = DTCI.Indirect.getValue();
22022 auto *A = OMPDeclareTargetDeclAttr::CreateImplicit(
22027 ML->DeclarationMarkedOpenMPDeclareTarget(ND, A);
22033 if (!D || !isa<VarDecl>(D))
22035 auto *VD = cast<VarDecl>(D);
22037 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
22038 if (SemaRef.
LangOpts.OpenMP >= 50 &&
22041 VD->hasGlobalStorage()) {
22042 if (!
MapTy || *
MapTy != OMPDeclareTargetDeclAttr::MT_To) {
22049 diag::err_omp_lambda_capture_in_declare_target_not_to);
22050 SemaRef.
Diag(SL, diag::note_var_explicitly_captured_here)
22055 if (
MapTy.hasValue())
22057 SemaRef.
Diag(VD->
getLocation(), diag::warn_omp_not_in_target_context);
22058 SemaRef.
Diag(SL, diag::note_used_here) << SR;
22062 Sema &SemaRef, DSAStackTy *Stack,
22064 return OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD) ||
22075 if (
auto *VD = dyn_cast<VarDecl>(D)) {
22077 if (!VD->isFileVarDecl() && !VD->isStaticLocal() &&
22078 !VD->isStaticDataMember())
22082 if (
DSAStack->isThreadPrivate(VD)) {
22083 Diag(SL, diag::err_omp_threadprivate_in_target);
22088 if (
const auto *FTD = dyn_cast<FunctionTemplateDecl>(D))
22089 D = FTD->getTemplatedDecl();
22090 if (
auto *FD = dyn_cast<FunctionDecl>(D)) {
22092 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(FD);
22093 if (IdLoc.
isValid() && Res && *Res == OMPDeclareTargetDeclAttr::MT_Link) {
22094 Diag(IdLoc, diag::err_omp_function_in_link_clause);
22095 Diag(FD->getLocation(), diag::note_defined_here) << FD;
22099 if (
auto *VD = dyn_cast<ValueDecl>(D)) {
22107 if (isa<VarDecl>(D) || isa<FunctionDecl>(D) ||
22108 isa<FunctionTemplateDecl>(D)) {
22110 OMPDeclareTargetDeclAttr::getActiveAttr(VD);
22111 unsigned Level = DeclareTargetNesting.size();
22112 if (ActiveAttr.hasValue() && ActiveAttr.getValue()->getLevel() >=
Level)
22114 DeclareTargetContextInfo &DTCI = DeclareTargetNesting.back();
22115 Expr *IndirectE =
nullptr;
22116 bool IsIndirect =
false;
22117 if (DTCI.Indirect.hasValue()) {
22118 IndirectE = DTCI.Indirect.getValue();
22122 auto *A = OMPDeclareTargetDeclAttr::CreateImplicit(
22123 Context, OMPDeclareTargetDeclAttr::MT_To, DTCI.DT, IndirectE,
22127 ML->DeclarationMarkedOpenMPDeclareTarget(D, A);
22148 unsigned Count = 0;
22149 for (
unsigned I = 0, E = MotionModifiers.size(); I < E; ++I) {
22151 llvm::is_contained(Modifiers, MotionModifiers[I])) {
22152 Diag(MotionModifiersLoc[I], diag::err_omp_duplicate_motion_modifier);
22156 "Modifiers exceed the allowed number of motion modifiers");
22157 Modifiers[Count] = MotionModifiers[I];
22158 ModifiersLoc[Count] = MotionModifiersLoc[I];
22162 MappableVarListInfo MVLI(VarList);
22164 MapperIdScopeSpec, MapperId, UnresolvedMappers);
22165 if (MVLI.ProcessedVarList.empty())
22169 Context, Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
22170 MVLI.VarComponents, MVLI.UDMapperList, Modifiers, ModifiersLoc,
22185 unsigned Count = 0;
22186 for (
unsigned I = 0, E = MotionModifiers.size(); I < E; ++I) {
22188 llvm::is_contained(Modifiers, MotionModifiers[I])) {
22189 Diag(MotionModifiersLoc[I], diag::err_omp_duplicate_motion_modifier);
22193 "Modifiers exceed the allowed number of motion modifiers");
22194 Modifiers[Count] = MotionModifiers[I];
22195 ModifiersLoc[Count] = MotionModifiersLoc[I];
22199 MappableVarListInfo MVLI(VarList);
22201 MapperIdScopeSpec, MapperId, UnresolvedMappers);
22202 if (MVLI.ProcessedVarList.empty())
22206 Context, Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
22207 MVLI.VarComponents, MVLI.UDMapperList, Modifiers, ModifiersLoc,
22213 MappableVarListInfo MVLI(VarList);
22217 for (
Expr *RefExpr : VarList) {
22218 assert(RefExpr &&
"NULL expr in OpenMP use_device_ptr clause.");
22221 Expr *SimpleRefExpr = RefExpr;
22225 MVLI.ProcessedVarList.push_back(RefExpr);
22226 PrivateCopies.push_back(
nullptr);
22227 Inits.push_back(
nullptr);
22234 Type =
Type.getNonReferenceType().getUnqualifiedType();
22236 auto *VD = dyn_cast<VarDecl>(D);
22240 Diag(ELoc, diag::err_omp_usedeviceptr_not_a_pointer)
22241 << 0 << RefExpr->getSourceRange();
22249 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
22250 if (VDPrivate->isInvalidDecl())
22255 *
this, VDPrivate, RefExpr->getType().getUnqualifiedType(), ELoc);
22261 *
this, VDInit, RefExpr->
getType(), RefExpr->getExprLoc());
22271 MVLI.ProcessedVarList.push_back(VD ? RefExpr->IgnoreParens() : Ref);
22272 PrivateCopies.push_back(VDPrivateRefExpr);
22273 Inits.push_back(VDInitRefExpr);
22278 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
22282 MVLI.VarBaseDeclarations.push_back(D);
22283 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
22284 MVLI.VarComponents.back().emplace_back(SimpleRefExpr, D,
22288 if (MVLI.ProcessedVarList.empty())
22292 Context, Locs, MVLI.ProcessedVarList, PrivateCopies, Inits,
22293 MVLI.VarBaseDeclarations, MVLI.VarComponents);
22298 MappableVarListInfo MVLI(VarList);
22300 for (
Expr *RefExpr : VarList) {
22301 assert(RefExpr &&
"NULL expr in OpenMP use_device_addr clause.");
22304 Expr *SimpleRefExpr = RefExpr;
22309 MVLI.ProcessedVarList.push_back(RefExpr);
22314 auto *VD = dyn_cast<VarDecl>(D);
22321 MVLI.ProcessedVarList.push_back(VD ? RefExpr->IgnoreParens() : Ref);
22326 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
22330 MVLI.VarBaseDeclarations.push_back(D);
22331 MVLI.VarComponents.emplace_back();
22332 Expr *Component = SimpleRefExpr;
22333 if (VD && (isa<OMPArraySectionExpr>(RefExpr->IgnoreParenImpCasts()) ||
22334 isa<ArraySubscriptExpr>(RefExpr->IgnoreParenImpCasts())))
22336 MVLI.VarComponents.back().emplace_back(Component, D,
22340 if (MVLI.ProcessedVarList.empty())
22344 MVLI.VarBaseDeclarations,
22345 MVLI.VarComponents);
22350 MappableVarListInfo MVLI(VarList);
22351 for (
Expr *RefExpr : VarList) {
22352 assert(RefExpr &&
"NULL expr in OpenMP is_device_ptr clause.");
22355 Expr *SimpleRefExpr = RefExpr;
22359 MVLI.ProcessedVarList.push_back(RefExpr);
22369 Diag(ELoc, diag::err_omp_argument_type_isdeviceptr)
22370 << 0 << RefExpr->getSourceRange();
22376 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
22378 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
22379 << getOpenMPClauseName(DVar.CKind)
22380 << getOpenMPClauseName(OMPC_is_device_ptr)
22381 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
22386 const Expr *ConflictExpr;
22387 if (
DSAStack->checkMappableExprComponentListsForDecl(
22392 ConflictExpr = R.front().getAssociatedExpression();
22395 Diag(ELoc, diag::err_omp_map_shared_storage) << RefExpr->getSourceRange();
22404 SimpleRefExpr, D,
false);
22405 DSAStack->addMappableExpressionComponents(
22406 D, MC, OMPC_is_device_ptr);
22409 MVLI.ProcessedVarList.push_back(SimpleRefExpr);
22414 assert((isa<DeclRefExpr>(SimpleRefExpr) ||
22415 isa<CXXThisExpr>(cast<MemberExpr>(SimpleRefExpr)->getBase())) &&
22416 "Unexpected device pointer expression!");
22417 MVLI.VarBaseDeclarations.push_back(
22418 isa<DeclRefExpr>(SimpleRefExpr) ? D :
nullptr);
22419 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
22420 MVLI.VarComponents.back().push_back(MC);
22423 if (MVLI.ProcessedVarList.empty())
22427 MVLI.VarBaseDeclarations,
22428 MVLI.VarComponents);
22444 DSAStack->getOMPAllocatorHandleT(),
22449 Allocator = AllocatorRes.
get();
22458 targetDiag(StartLoc, diag::err_expected_allocator_expression);
22462 for (
Expr *RefExpr : VarList) {
22463 assert(RefExpr &&
"NULL expr in OpenMP private clause.");
22466 Expr *SimpleRefExpr = RefExpr;
22470 Vars.push_back(RefExpr);
22476 auto *VD = dyn_cast<VarDecl>(D);
22481 ? RefExpr->IgnoreParens()
22489 DSAStack->addInnerAllocatorExpr(Allocator);
22491 ColonLoc, EndLoc, Vars);
22499 for (
Expr *RefExpr : VarList) {
22500 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
22503 Expr *SimpleRefExpr = RefExpr;
22507 Vars.push_back(RefExpr);
22514 if (
const Expr *PrevRef =
22515 DSAStack->addUniqueNontemporal(D, SimpleRefExpr)) {
22516 Diag(ELoc, diag::err_omp_used_in_clause_twice)
22517 << 0 << getOpenMPClauseName(OMPC_nontemporal) << ERange;
22518 Diag(PrevRef->getExprLoc(), diag::note_omp_explicit_dsa)
22519 << getOpenMPClauseName(OMPC_nontemporal);
22523 Vars.push_back(RefExpr);
22538 for (
Expr *RefExpr : VarList) {
22539 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
22542 Expr *SimpleRefExpr = RefExpr;
22547 Vars.push_back(RefExpr);
22552 const DSAStackTy::DSAVarData DVar =
22558 if (DVar.CKind != OMPC_reduction || DVar.Modifier != OMPC_REDUCTION_inscan)
22559 Diag(ELoc, diag::err_omp_inclusive_exclusive_not_reduction)
22560 << RefExpr->getSourceRange();
22562 if (
DSAStack->getParentDirective() != OMPD_unknown)
22563 DSAStack->markDeclAsUsedInScanDirective(D);
22564 Vars.push_back(RefExpr);
22578 for (
Expr *RefExpr : VarList) {
22579 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
22582 Expr *SimpleRefExpr = RefExpr;
22587 Vars.push_back(RefExpr);
22593 DSAStackTy::DSAVarData DVar;
22594 if (ParentDirective != OMPD_unknown)
22595 DVar =
DSAStack->getTopDSA(D,
true);
22600 if (ParentDirective == OMPD_unknown || DVar.CKind != OMPC_reduction ||
22601 DVar.Modifier != OMPC_REDUCTION_inscan) {
22602 Diag(ELoc, diag::err_omp_inclusive_exclusive_not_reduction)
22603 << RefExpr->getSourceRange();
22605 DSAStack->markDeclAsUsedInScanDirective(D);
22607 Vars.push_back(RefExpr);
22618 QualType OMPAlloctraitT = Stack->getOMPAlloctraitT();
22619 if (!OMPAlloctraitT.
isNull())
22624 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_alloctrait_t";
22627 Stack->setOMPAlloctraitT(PT.
get());
22646 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
22647 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
22648 StringRef Allocator =
22649 OMPAllocateDeclAttr::ConvertAllocatorTypeTyToStr(AllocatorKind);
22657 Expr *AllocatorExpr =
nullptr;
22665 auto *DRE = dyn_cast<DeclRefExpr>(AllocatorExpr);
22666 bool IsPredefinedAllocator =
false;
22668 IsPredefinedAllocator = PredefinedAllocators.count(DRE->
getDecl());
22673 DSAStack->getOMPAllocatorHandleT(),
22675 (!IsPredefinedAllocator &&
22679 <<
"omp_allocator_handle_t" << (DRE ? 1 : 0)
22688 diag::err_omp_predefined_allocator_with_traits)
22691 << cast<NamedDecl>(DRE->
getDecl())->getName()
22700 diag::err_omp_nonpredefined_allocator_without_traits);
22708 IsPredefinedAllocator
22709 ? DSAStackTy::UsesAllocatorsDeclKind::PredefinedAllocator
22710 : DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator);
22712 Expr *AllocatorTraitsExpr =
nullptr;
22726 if (
const auto *ConstArrayTy = dyn_cast<ConstantArrayType>(Ty))
22727 TraitTy = ConstArrayTy->getElementType();
22734 diag::err_omp_expected_array_alloctraits)
22735 << AllocatorTraitsExpr->
getType();
22740 if (
auto *DRE = dyn_cast<DeclRefExpr>(AllocatorTraitsExpr))
22743 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait);
22760 for (
Expr *RefExpr : Locators) {
22761 assert(RefExpr &&
"NULL expr in OpenMP shared clause.");
22762 if (isa<DependentScopeDeclRefExpr>(RefExpr) || RefExpr->isTypeDependent()) {
22764 Vars.push_back(RefExpr);
22772 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
22773 << 1 << 0 << RefExpr->getSourceRange();
22782 if (!Res.
isUsable() && !isa<OMPArraySectionExpr>(SimpleExpr) &&
22783 !isa<OMPArrayShapingExpr>(SimpleExpr)) {
22784 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
22785 << 1 << 0 << RefExpr->getSourceRange();
22788 Vars.push_back(SimpleExpr);
22792 EndLoc, Modifier, Vars);
22801 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
22804 << getOpenMPClauseName(OMPC_bind);